karthiksagarn committed
Commit 546eeec · 1 Parent(s): 99a828c
Update app.py
app.py
CHANGED
@@ -2,8 +2,15 @@ import streamlit as st
 from langchain.llms import CTransformers
 from langchain.prompts import PromptTemplate
 from transformers import AutoModelForCausalLM
-
+from dotenv import load_dotenv
+from huggingface_hub import login
+
+# get environment variables
+load_dotenv()
+access_token = os.getenv("HUGGINGFACE_TOKEN")
+login(token=access_token)
 
+# from transformers import pipeline
 # model = pipeline("text-generation", model="TheBloke/Llama-2-7B-Chat-GGML")
 model = AutoModelForCausalLM.from_pretrained("llama-2-7b-chat.bin")
 
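Note that, as committed, app.py calls os.getenv() without importing os, and transformers' AutoModelForCausalLM.from_pretrained() expects a model directory or Hub repo id, so pointing it at a single GGML file such as llama-2-7b-chat.bin will generally fail. Below is a minimal sketch of how the token loading added in this commit and the local model load could fit together, reusing the CTransformers wrapper that app.py already imports; the model_type and config values are illustrative assumptions, not part of the commit.

import os

from dotenv import load_dotenv
from huggingface_hub import login
from langchain.llms import CTransformers

# Read HUGGINGFACE_TOKEN from a local .env file and log in to the Hugging Face
# Hub, mirroring the lines added in this commit (plus the missing import of os).
load_dotenv()
access_token = os.getenv("HUGGINGFACE_TOKEN")
login(token=access_token)

# Load the local GGML weights through CTransformers instead of
# AutoModelForCausalLM; model_type and the config values are assumptions.
llm = CTransformers(
    model="llama-2-7b-chat.bin",
    model_type="llama",
    config={"max_new_tokens": 256, "temperature": 0.01},
)

The sketch assumes a .env file next to app.py that defines HUGGINGFACE_TOKEN, matching the variable name read in the diff.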