karthiksagarn committed
Commit f3b4d90 • 1 Parent(s): 4dc91b5
Update app.py
app.py
CHANGED
@@ -2,8 +2,10 @@ import streamlit as st
 from langchain.llms import CTransformers
 from langchain.prompts import PromptTemplate
 from transformers import AutoModelForCausalLM
+from transformers import pipeline
 
-model =
+model = pipeline("text-generation", model="TheBloke/Llama-2-7B-Chat-GGML")
+# model = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7B-Chat-GGML")
 
 ## Function to get response form the LLama2 Model
 def getLLamaResponse(input_text, no_of_words, blog_style):
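
For reference, the LangChain imports already present in app.py (CTransformers and PromptTemplate) are the usual way to serve a GGML checkpoint such as TheBloke/Llama-2-7B-Chat-GGML. A minimal sketch of how getLLamaResponse could wire them together is shown below; the prompt wording, config values, and model_type argument are illustrative assumptions, not the actual contents of app.py.

from langchain.llms import CTransformers
from langchain.prompts import PromptTemplate

def getLLamaResponse(input_text, no_of_words, blog_style):
    # Load the GGML chat checkpoint through the CTransformers backend
    # (assumed settings; adjust max_new_tokens/temperature as needed).
    llm = CTransformers(
        model="TheBloke/Llama-2-7B-Chat-GGML",
        model_type="llama",
        config={"max_new_tokens": 256, "temperature": 0.01},
    )

    # Build the prompt from the user-supplied fields (illustrative template).
    template = (
        "Write a blog for a {blog_style} audience on the topic {input_text} "
        "within {no_of_words} words."
    )
    prompt = PromptTemplate(
        input_variables=["blog_style", "input_text", "no_of_words"],
        template=template,
    )

    # Generate and return the blog text.
    return llm(prompt.format(
        blog_style=blog_style,
        input_text=input_text,
        no_of_words=no_of_words,
    ))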