Spaces:
Sleeping
Sleeping
karthiksagarn
committed on
Commit
•
ff78f0b
1
Parent(s):
b144cd3
Update app.py
Browse files
app.py
CHANGED
@@ -1,11 +1,13 @@
|
|
1 |
import streamlit as st
|
2 |
from langchain.llms import CTransformers
|
3 |
from langchain.prompts import PromptTemplate
|
|
|
4 |
|
|
|
5 |
|
6 |
## Function to get response from the LLama2 Model
|
7 |
def getLLamaResponse(input_text, no_of_words, blog_style):
|
8 |
-
llm = CTransformers(model=
|
9 |
model_type='llama',
|
10 |
config={"max_new_tokens": 256,
|
11 |
'temperature': 0.01})
|
|
|
1 |
import streamlit as st
|
2 |
from langchain.llms import CTransformers
|
3 |
from langchain.prompts import PromptTemplate
|
4 |
+
from transformers import AutoModelForCausalLM
|
5 |
|
6 |
+
model = AutoModelForCausalLM.from_pretrained("karthiksagarn/Blog-Generation-App")
|
7 |
|
8 |
## Function to get response from the LLama2 Model
|
9 |
def getLLamaResponse(input_text, no_of_words, blog_style):
|
10 |
+
llm = CTransformers(model=model,
|
11 |
model_type='llama',
|
12 |
config={"max_new_tokens": 256,
|
13 |
'temperature': 0.01})
|