karthiksagarn
committed on
Commit 99a828c
1 Parent(s): 9195fec
Update app.py
app.py
CHANGED
@@ -2,10 +2,10 @@ import streamlit as st
 from langchain.llms import CTransformers
 from langchain.prompts import PromptTemplate
 from transformers import AutoModelForCausalLM
-from transformers import pipeline
+# from transformers import pipeline
 
-model = pipeline("text-generation", model="TheBloke/Llama-2-7B-Chat-GGML")
-
+# model = pipeline("text-generation", model="TheBloke/Llama-2-7B-Chat-GGML")
+model = AutoModelForCausalLM.from_pretrained("llama-2-7b-chat.bin")
 
 ## Function to get response form the LLama2 Model
 def getLLamaResponse(input_text, no_of_words, blog_style):
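For context: transformers' AutoModelForCausalLM.from_pretrained generally expects a Hugging Face model ID or directory rather than a single GGML .bin file, and GGML checkpoints such as TheBloke/Llama-2-7B-Chat-GGML are more commonly loaded through LangChain's CTransformers wrapper, which app.py already imports. The following is a minimal sketch, not the committed code, of how getLLamaResponse might load and call the model that way; the checkpoint file name, config values, and prompt wording are assumptions for illustration only.

# Sketch only: assumed checkpoint name, config, and prompt text.
from langchain.llms import CTransformers
from langchain.prompts import PromptTemplate

def get_llama_response_sketch(input_text, no_of_words, blog_style):
    # Load local GGML weights via the ctransformers backend (file name is an assumption).
    llm = CTransformers(
        model="llama-2-7b-chat.ggmlv3.q8_0.bin",
        model_type="llama",
        config={"max_new_tokens": 256, "temperature": 0.01},
    )

    # Prompt template mirroring the function's parameters.
    prompt = PromptTemplate(
        input_variables=["blog_style", "input_text", "no_of_words"],
        template=(
            "Write a blog for a {blog_style} audience on the topic {input_text} "
            "within {no_of_words} words."
        ),
    )

    # Format the prompt and generate a response from the local model.
    return llm(
        prompt.format(
            blog_style=blog_style,
            input_text=input_text,
            no_of_words=no_of_words,
        )
    )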