from LLM import LLM

import streamlit as st


def format_chat_history(chat_history):
    """Render the (speaker, message) chat history as plain text."""
    formatted_history = ""
    for speaker, message in chat_history:
        formatted_history += f"{speaker}: {message}\n"
    return formatted_history


def main():
    st.title("LLM Chat")

    model = "gpt2"

    # Streamlit reruns this script from the top on every interaction, so the
    # chat object and the history are kept in session_state instead of being
    # rebuilt (and wiped) on each rerun.
    if "chat" not in st.session_state:
        llm = LLM(model)
        context = (
            "You are a helpful assistant in a school. "
            "You are helping a student with their homework."
        )
        st.session_state.chat = llm.get_chat(context=context)
        st.session_state.chat_history = []

    user_input = st.text_input("User:")
    button = st.button("Send")
    chat_area = st.empty()

    # No busy loop is needed: the script runs top to bottom on each click,
    # so a simple button check replaces the original `while True`.
    if button and user_input:
        st.session_state.chat_history.append(("User", user_input))
        bot_response = st.session_state.chat.answerStoreHistory(qn=user_input)
        st.session_state.chat_history.append(("Bot", bot_response))

    chat_area.text(format_chat_history(st.session_state.chat_history))


if __name__ == "__main__":
    main()