import streamlit as st
from streamlit_chat import message
from langchain.llms import HuggingFaceHub
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory, ConversationSummaryMemory
import os
# Get API key from environment variable
hf_api_key = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
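# Fail fast with a clear hint if the token is missing; HuggingFaceHub calls would
# otherwise fail later with an authentication error (optional safeguard).
if not hf_api_key:
    st.error("HUGGINGFACEHUB_API_TOKEN is not set. Export it before starting the app.")
    st.stop()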
# Initialize session state variables
if 'conversation' not in st.session_state:
    st.session_state['conversation'] = None
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
if 'API_Key' not in st.session_state:
    st.session_state['API_Key'] = ''  # No API key input needed; the token is read from the environment
# Setting page title and header
st.set_page_config(page_title="Chat GPT Clone", page_icon=":robot_face:")
st.markdown("
How can I assist you?
", unsafe_allow_html=True)
# Sidebar (no API key input needed)
st.sidebar.title("Options")
# Summarization Button
summarise_button = st.sidebar.button("Summarise the conversation", key="summarise")
if summarise_button:
    if st.session_state['conversation'] is not None:
        with st.spinner("Summarizing..."):  # Spinner for visual feedback
            # ConversationBufferMemory exposes the full transcript via .buffer
            summary = st.session_state['conversation'].memory.buffer
        st.sidebar.write("Conversation Summary:\n\n" + summary)
    else:
        st.sidebar.write("Nothing to summarise yet - start chatting first.")
# Defining the get_response function
def get_response(user_input, api_key):
    if st.session_state['conversation'] is None:
        llm = HuggingFaceHub(
            repo_id="google/flan-t5-xxl",  # Any text-generation repo served by the Hugging Face Inference API works here
            model_kwargs={"temperature": 0.1, "max_new_tokens": 512},
            huggingfacehub_api_token=api_key
        )
        # ConversationBufferMemory keeps the running chat history for the chain
        st.session_state['conversation'] = ConversationChain(
            llm=llm,
            verbose=True,
            memory=ConversationBufferMemory()
        )
    response = st.session_state['conversation'].predict(input=user_input)
    return response
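# Note: ConversationBufferMemory stores the raw transcript, so the sidebar button above
# shows the full history rather than a condensed summary. For a genuine summary, the
# imported ConversationSummaryMemory could be swapped in (a sketch, assuming the same llm):
#     memory=ConversationSummaryMemory(llm=llm)
# Its .buffer attribute then holds the running summary instead of the verbatim messages.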
# Chat UI
response_container = st.container()
container = st.container()
with container:
    with st.form(key='my_form', clear_on_submit=True):
        user_input = st.text_area("Your question goes here:", key='input', height=100)
        submit_button = st.form_submit_button(label='Send')

        if submit_button and user_input:  # Only respond to non-empty input
            st.session_state['messages'].append(user_input)
            with st.spinner("Thinking..."):  # Spinner while the model generates a reply
                model_response = get_response(user_input, hf_api_key)
            st.session_state['messages'].append(model_response)
with response_container:
    if st.session_state['messages']:  # Only render once there is history to show
        for i in range(len(st.session_state['messages'])):
            # Even indices hold user messages, odd indices hold model replies
            if (i % 2) == 0:
                message(st.session_state['messages'][i], is_user=True, key=str(i) + '_user')
            else:
                message(st.session_state['messages'][i], key=str(i) + '_AI')
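# To run locally (assuming this file is saved as app.py and streamlit, streamlit-chat,
# langchain and huggingface_hub are installed):
#   export HUGGINGFACEHUB_API_TOKEN=<your token>
#   streamlit run app.py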