Omer Danziger committed
Commit · bb96974
1 Parent(s): 9eb8ca4

add chatbot

Files changed:
- app.py +11 -3
- chatbot.py +44 -0
- debug.py +4 -0
- requirements.txt +0 -0
app.py
CHANGED
@@ -16,15 +16,23 @@ def main():
     chat_history = []
     context = "You are an helpfully assistant in a school. You are helping a student with his homework."
     chat = llm.get_chat(context=context)
+    user_input = st.text_input("User:")
+    button = st.button("Send")
+    chat_area = st.empty()
+
     while True:
-        user_input
-        if
+        print(user_input)
+        if button:
         if user_input:
             chat_history.append(("User", user_input))
             bot_response = chat.answerStoreHistory(qn=user_input)
             chat_history.append(("Bot", bot_response))
+            print(chat_history)
+
+        chat_area.text(format_chat_history(chat_history))
 
-
+if __name__ == "__main__":
+    main()
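The updated main() renders the transcript through format_chat_history, which is not defined anywhere in this hunk. A minimal sketch of what such a helper might look like, assuming chat_history is the list of (speaker, text) tuples built above (the function name and behaviour here are an assumption, not part of this commit):

def format_chat_history(chat_history):
    # Assumed shape: a list of (speaker, text) tuples such as
    # [("User", "hi"), ("Bot", "hello")], as built in main() above.
    return "\n".join(f"{speaker}: {text}" for speaker, text in chat_history)

Also worth noting: the while True: loop runs inside a single Streamlit script execution, and Streamlit re-executes the whole script on each widget interaction, so user_input and button keep whatever values they had when that run started.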
chatbot.py
ADDED
@@ -0,0 +1,44 @@
+import streamlit as st
+from streamlit_chat import message
+
+from LLM import LLM
+
+st.set_page_config(
+    page_title="Streamlit Chat - Demo",
+    page_icon=":robot:"
+)
+
+# API_URL = "https://api-inference.huggingface.co/models/facebook/blenderbot-400M-distill"
+# headers = {"Authorization": st.secrets['api_key']}
+
+st.header("Streamlit Chat - Demo")
+# st.markdown("[Github](https://github.com/ai-yash/st-chat)")
+
+generated = []
+past = []
+
+def query(chat, payload):
+    response = chat.answerStoreHistory(qn=payload)
+    return response
+
+
+def get_text():
+    input_text = st.text_input("You: ", "Hello, how are you?", key="input")
+    return input_text
+
+
+user_input = get_text()
+llm = LLM("gpt2")
+chat = llm.get_chat(context="You are an helpfully assistant in a school. You are helping a student with his homework.")
+
+if user_input:
+    output = chat.answerStoreHistory(qn=user_input)
+
+    past.append(user_input)
+    generated.append(output)
+
+if generated:
+
+    for i in range(len(generated) - 1, -1, -1):
+        message(generated[i], key=str(i))
+        message(past[i], is_user=True, key=str(i) + '_user')
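A note on chatbot.py: generated and past are plain module-level lists, so they are re-created on every Streamlit rerun and only the most recent exchange ever reaches the message() loop. A minimal sketch of the usual fix, keeping the history in st.session_state; the LLM / get_chat / answerStoreHistory calls are copied from this commit and assumed to behave as used here:

import streamlit as st
from streamlit_chat import message

from LLM import LLM  # the repo's own wrapper, as imported in chatbot.py

llm = LLM("gpt2")
chat = llm.get_chat(context="You are a helpful assistant in a school.")

# Keep the transcript in session state so it survives Streamlit reruns;
# module-level lists are wiped each time Streamlit re-executes the script.
if "past" not in st.session_state:
    st.session_state["past"] = []
if "generated" not in st.session_state:
    st.session_state["generated"] = []

user_input = st.text_input("You: ", key="input")
if user_input:
    st.session_state["past"].append(user_input)
    st.session_state["generated"].append(chat.answerStoreHistory(qn=user_input))

# Render newest exchange first, mirroring the loop in chatbot.py.
for i in range(len(st.session_state["generated"]) - 1, -1, -1):
    message(st.session_state["generated"][i], key=str(i))
    message(st.session_state["past"][i], is_user=True, key=str(i) + "_user")

This sketch still rebuilds llm and chat on every rerun, just as the committed file does; caching those objects would be a further, separate change.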
debug.py
ADDED
@@ -0,0 +1,4 @@
+from streamlit.web import bootstrap
+
+real_script = 'app.py'
+bootstrap.run(real_script, f'run.py {real_script}', [], {})
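debug.py looks like a convenience launcher: it boots the Streamlit server from an ordinary Python process via streamlit.web.bootstrap, so app.py can be started (and stepped through in an IDE debugger) with python debug.py instead of streamlit run app.py. Note that bootstrap is an internal Streamlit module and its run() signature has changed across releases, so this snippet is version-dependent; streamlit run app.py remains the stable way to launch the app.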
requirements.txt
CHANGED
Binary files a/requirements.txt and b/requirements.txt differ