andrewgleave committed on
Commit
b8f5c23
1 Parent(s): 1d0e96d
Files changed (2) hide show
  1. app.py +15 -16
  2. chain.py +6 -17
app.py CHANGED
@@ -14,7 +14,11 @@ from chain import get_chain
14
 
15
  STORE_DIR = "store"
16
  YOUTUBE_EMBED_TEMPLATE = """
17
- <iframe width="354" height="200" src="{source}" title="YouTube video player" frameborder="0"
 
 
 
 
18
  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen>
19
  </iframe>"""
20
 
@@ -80,8 +84,11 @@ def chat(inp, history, agent):
80
  return history, history, source_html
81
 
82
 
83
- block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
84
- with block:
 
 
 
85
  gr.Markdown("<h3><center>ToKBot🤖 - Ask ToKCast Questions</center></h3>")
86
  openai_api_key_textbox = gr.Textbox(
87
  placeholder="Paste your OpenAI API key (sk-...)",
@@ -90,13 +97,8 @@ with block:
90
  type="password",
91
  )
92
 
 
93
  chatbot = gr.Chatbot()
94
- gr.Markdown("<h3>Excerpts</h3>")
95
- sources = gr.HTML(
96
- """<div style="min-height:200px;display:flex;align-items:center;justify-content:center;">
97
- <h3 style="text-align:center;color:#555;font-size:2rem;">No videos</h3>
98
- </div>"""
99
- )
100
  with gr.Row():
101
  message = gr.Textbox(
102
  label="What's your question?",
@@ -107,24 +109,21 @@ with block:
107
 
108
  gr.Examples(
109
  examples=[
110
- "What is a beginning of infinity?",
111
- "How do memes differ from genes in how they replicate?",
112
  "What is the nature of knowledge and how does it grow?",
113
  ],
114
  inputs=message,
115
  )
116
 
117
  gr.HTML(
118
- """A GPT-3/LangChain bot that answers questions about the TokCast podcast provides relevant video excerpts"""
119
  )
120
 
121
  gr.HTML(
122
  "<center>Powered by <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
123
  )
124
 
125
- state = gr.State()
126
- agent_state = gr.State()
127
-
128
  submit.click(
129
  chat,
130
  inputs=[message, state, agent_state],
@@ -142,4 +141,4 @@ with block:
142
  outputs=[agent_state],
143
  )
144
 
145
- block.launch()
 
14
 
15
  STORE_DIR = "store"
16
  YOUTUBE_EMBED_TEMPLATE = """
17
+ <iframe width="354"
18
+ height="200"
19
+ src="{source}&cc_load_policy=1"
20
+ title="YouTube video player"
21
+ frameborder="0"
22
  allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen>
23
  </iframe>"""
24
 
 
84
  return history, history, source_html
85
 
86
 
87
+ with gr.Blocks(css=".gradio-container {background-color: lightgray}") as demo:
88
+
89
+ state = gr.State()
90
+ agent_state = gr.State()
91
+
92
  gr.Markdown("<h3><center>ToKBot🤖 - Ask ToKCast Questions</center></h3>")
93
  openai_api_key_textbox = gr.Textbox(
94
  placeholder="Paste your OpenAI API key (sk-...)",
 
97
  type="password",
98
  )
99
 
100
+ sources = gr.HTML()
101
  chatbot = gr.Chatbot()
 
 
 
 
 
 
102
  with gr.Row():
103
  message = gr.Textbox(
104
  label="What's your question?",
 
109
 
110
  gr.Examples(
111
  examples=[
112
+ 'What does "the beginning of infinity" refer to?',
113
+ "How do memes differ from genes in their replication?",
114
  "What is the nature of knowledge and how does it grow?",
115
  ],
116
  inputs=message,
117
  )
118
 
119
  gr.HTML(
120
+ """<p>A GPT-3/LangChain bot that answers questions about the TokCast podcast and provides relevant video excerpts</p>"""
121
  )
122
 
123
  gr.HTML(
124
  "<center>Powered by <a href='https://github.com/hwchase17/langchain'>LangChain 🦜️🔗</a></center>"
125
  )
126
 
 
 
 
127
  submit.click(
128
  chat,
129
  inputs=[message, state, agent_state],
 
141
  outputs=[agent_state],
142
  )
143
 
144
+ demo.launch()
chain.py CHANGED
@@ -6,9 +6,8 @@ from langchain.chains.base import Chain
6
  from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
7
  from langchain.chains.question_answering import load_qa_chain
8
  from langchain.prompts import FewShotPromptTemplate
9
-
10
- # from langchain.prompts.example_selector import SemanticSimilarityExampleSelector
11
  from langchain.vectorstores import FAISS
 
12
  from pydantic import BaseModel
13
 
14
 
@@ -33,20 +32,18 @@ class CustomChain(Chain, BaseModel):
33
  new_question = self.key_word_extractor.run(
34
  question=question, chat_history=chat_history_str
35
  )
36
-
37
  else:
38
  new_question = question
39
- docs = self.vstore.similarity_search(new_question, k=3)
40
  new_inputs = inputs.copy()
41
  new_inputs["question"] = new_question
42
  new_inputs["chat_history"] = chat_history_str
43
  answer, _ = self.chain.combine_docs(docs, **new_inputs)
44
- sources = ""
45
  if "SOURCES:" in answer:
46
  answer, sources = answer.split("SOURCES:")
47
- sources = sources.split(", ")
48
- answer = answer.strip()
49
- return {"answer": answer, "sources": sources}
50
 
51
 
52
  def get_chain(vectorstore: FAISS) -> Chain:
@@ -68,10 +65,6 @@ def get_chain(vectorstore: FAISS) -> Chain:
68
  {chat_history}
69
  Follow Up Input: {question}
70
  Standalone question:"""
71
- # example_selector = SemanticSimilarityExampleSelector(
72
- # vectorstore=vectorstore,
73
- # k=4,
74
- # )
75
 
76
  examples = [
77
  {
@@ -121,13 +114,9 @@ ANSWER:"""
121
  chain_type="stuff",
122
  prompt=PROMPT,
123
  document_prompt=EXAMPLE_PROMPT,
124
- verbose=True,
125
  )
126
  return CustomChain(
127
- chain=doc_chain,
128
- vstore=vectorstore,
129
- key_word_extractor=key_word_extractor,
130
- verbose=True,
131
  )
132
 
133
 
 
6
  from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
7
  from langchain.chains.question_answering import load_qa_chain
8
  from langchain.prompts import FewShotPromptTemplate
 
 
9
  from langchain.vectorstores import FAISS
10
+
11
  from pydantic import BaseModel
12
 
13
 
 
32
  new_question = self.key_word_extractor.run(
33
  question=question, chat_history=chat_history_str
34
  )
 
35
  else:
36
  new_question = question
37
+ docs = self.vstore.similarity_search(new_question, k=4)
38
  new_inputs = inputs.copy()
39
  new_inputs["question"] = new_question
40
  new_inputs["chat_history"] = chat_history_str
41
  answer, _ = self.chain.combine_docs(docs, **new_inputs)
42
+ sources = []
43
  if "SOURCES:" in answer:
44
  answer, sources = answer.split("SOURCES:")
45
+ sources = sources.split(", ")
46
+ return {"answer": answer.strip(), "sources": sources}
 
47
 
48
 
49
  def get_chain(vectorstore: FAISS) -> Chain:
 
65
  {chat_history}
66
  Follow Up Input: {question}
67
  Standalone question:"""
 
 
 
 
68
 
69
  examples = [
70
  {
 
114
  chain_type="stuff",
115
  prompt=PROMPT,
116
  document_prompt=EXAMPLE_PROMPT,
 
117
  )
118
  return CustomChain(
119
+ chain=doc_chain, vstore=vectorstore, key_word_extractor=key_word_extractor
 
 
 
120
  )
121
 
122