import streamlit as st
from streamlit_chat import message
from LLM import LLM
st.set_page_config(
    page_title="Streamlit Chat - Demo",
    page_icon=":robot:"
)

# API_URL = "https://api-inference.huggingface.co/models/facebook/blenderbot-400M-distill"
# headers = {"Authorization": st.secrets['api_key']}
st.header("Streamlit Chat - Demo")
# st.markdown("[Github](https://github.com/ai-yash/st-chat)")
if 'generated' not in st.session_state:
    st.session_state['generated'] = ['new']
if 'past' not in st.session_state:
    st.session_state['past'] = ['new']

def query(chat, payload):
    # Forward a question to the chat object (not called anywhere below).
    response = chat.answerStoreHistory(qn=payload)
    return response

def get_text():
    input_text = st.text_input("You: ", "Hello, how are you?", key="input")
    return input_text

user_input = get_text()

# Keep one chat object across Streamlit reruns so the model is not reloaded
# on every interaction and the history stored by answerStoreHistory is not lost.
if 'chat' not in st.session_state:
    st.session_state['chat'] = LLM("gpt2").get_chat(
        context="You are a helpful assistant in a school. You are helping a student with his homework.")
chat = st.session_state['chat']

if user_input:
    output = chat.answerStoreHistory(qn=user_input)
    st.session_state.past.append(user_input)
    st.session_state.generated.append(output)

if st.session_state['generated']:
    # Render the conversation, newest exchange first.
    for i in range(len(st.session_state['generated']) - 1, -1, -1):
        message(st.session_state["generated"][i], key=str(i))
        message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
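
For reference, the LLM module imported at the top is not part of this file. A minimal LLM.py that would satisfy the calls made here (LLM(model_name), get_chat(context=...), answerStoreHistory(qn=...)) might look like the sketch below, built on the Hugging Face transformers text-generation pipeline; the Chat class, the prompt format, and the generation parameters are illustrative assumptions, not the actual implementation.

from transformers import pipeline


class Chat:
    def __init__(self, generator, context):
        self.generator = generator
        self.history = [context]

    def answerStoreHistory(self, qn):
        # Build a prompt from the running history, generate a reply, and
        # remember both sides of the exchange for the next turn.
        prompt = "\n".join(self.history + [qn])
        answer = self.generator(prompt, max_new_tokens=64,
                                return_full_text=False)[0]["generated_text"]
        self.history += [qn, answer]
        return answer


class LLM:
    def __init__(self, model_name):
        self.generator = pipeline("text-generation", model=model_name)

    def get_chat(self, context):
        return Chat(self.generator, context)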