Update app.py
app.py
CHANGED
@@ -3,27 +3,27 @@ import os
 
 import gradio as gr
 import langchain
-import
+import pickle
 from langchain.vectorstores import Weaviate
+from langchain import OpenAI
 
 from chain import get_new_chain1
 
-
-
-
-
-    client = weaviate.Client(
-        url=WEAVIATE_URL,
-        additional_headers={"X-OpenAI-Api-Key": os.environ["OPENAI_API_KEY"]},
-    )
-    return Weaviate(client, "Paragraph", "content", attributes=["source"])
+def get_faiss_store():
+    with open("docs.pkl", 'rb') as f:
+        faiss_store = pickle.load(f)
+        return faiss_store
 
 
 def set_openai_api_key(api_key, agent):
     if api_key:
         os.environ["OPENAI_API_KEY"] = api_key
-        vectorstore =
-
+        vectorstore = get_faiss_store()
+
+        rephraser_llm = OpenAI(model_name="text-davinci-003", temperature=0)
+        final_output_llm = OpenAI(model_name="text-davinci-003", temperature=0, max_tokens=-1)
+
+        qa_chain = get_new_chain1(vectorstore, rephraser_llm, final_output_llm)
         os.environ["OPENAI_API_KEY"] = ""
         return qa_chain
 
@@ -47,7 +47,7 @@ block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
 
 with block:
     with gr.Row():
-        gr.Markdown("<h3><center>
+        gr.Markdown("<h3><center>Hugging Face Doc Search</center></h3>")
 
         openai_api_key_textbox = gr.Textbox(
             placeholder="Paste your OpenAI API key (sk-...)",
@@ -68,20 +68,20 @@ with block:
 
     gr.Examples(
         examples=[
-            "
-            "How do I
-            "
+            "How do I install transformers?",
+            "How do I load pretrained instances with an AutoClass?",
+            "How do I fine-tune a pretrained model?",
        ],
         inputs=message,
     )
 
     gr.HTML(
         """
-    This simple application
+    This simple application uses Langchain, an LLM, and FAISS to do Q&A over the Hugging Face Documentation."""
     )
 
     gr.HTML(
-        "<center>Powered by <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
+        "<center>Powered by <a href='huggingface.co'>Hugging Face 🤗</a> and <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
     )
 
     state = gr.State()
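
The updated app.py expects a pre-built FAISS index pickled as docs.pkl to already be present in the Space; the ingestion step is not part of this commit. Below is a minimal sketch of how such a file could be produced with LangChain APIs of the same era. The docs/ directory layout, chunk sizes, and the choice of OpenAIEmbeddings are assumptions for illustration, not taken from this repository.

# ingest.py (hypothetical): build a FAISS store over local doc files and pickle it as docs.pkl
import pickle
from pathlib import Path

from langchain.embeddings import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS

docs, metadatas = [], []
splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
for path in Path("docs").glob("**/*.txt"):  # assumed location of the documentation sources
    chunks = splitter.split_text(path.read_text())
    docs.extend(chunks)
    metadatas.extend([{"source": str(path)}] * len(chunks))

# Embedding requires OPENAI_API_KEY to be set in the environment.
faiss_store = FAISS.from_texts(docs, OpenAIEmbeddings(), metadatas=metadatas)

# Serialize the whole store so app.py can reload it via get_faiss_store().
with open("docs.pkl", "wb") as f:
    pickle.dump(faiss_store, f)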
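
chain.py and get_new_chain1 are referenced but not touched by this commit, so the exact chain construction is unknown. The two LLMs passed in suggest a rephrase-then-answer pattern (in LangChain's OpenAI wrapper, max_tokens=-1 requests the largest completion the model can still fit after the prompt). A rough, hypothetical illustration of that pattern using only generic vectorstore and LLM calls:

# Hypothetical sketch only: the real chain lives in chain.py, which this commit does not show.
def answer(question, vectorstore, rephraser_llm, final_output_llm):
    # 1) Turn the raw user question into a standalone search query.
    query = rephraser_llm(f"Rewrite the following as a standalone question:\n{question}")

    # 2) Pull the most relevant chunks from the pickled FAISS store.
    docs = vectorstore.similarity_search(query, k=4)
    context = "\n\n".join(doc.page_content for doc in docs)

    # 3) Have the second LLM answer from that retrieved context only.
    return final_output_llm(
        "Answer the question using only the context below.\n\n"
        f"Context:\n{context}\n\nQuestion: {query}"
    )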