import streamlit as st
from streamlit_chat import message
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationSummaryMemory
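# Note: these imports assume the following packages are installed (e.g. via a
# requirements.txt): streamlit, streamlit-chat, langchain, langchain-google-genai.
# ConversationChain and ConversationSummaryMemory come from classic LangChain;
# newer LangChain releases may deprecate or relocate them.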
# Step 1: Set up Google API key
google_api_key = st.secrets["google_api_key"]

# Step 2: Initialize Session State Variables
if 'conversation' not in st.session_state:
    st.session_state['conversation'] = None
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
if 'API_Key' not in st.session_state:
    st.session_state['API_Key'] = google_api_key  # Use the Google API key from secrets
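# st.session_state persists across Streamlit reruns, so the conversation chain,
# the chat history, and the API key survive each form submit instead of being
# rebuilt from scratch on every rerun.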
# Step 3: Build the Streamlit UI
st.set_page_config(page_title="Chat GPT Clone", page_icon=":robot_face:")
st.markdown("<h1 style='text-align: center;'>How can I assist you?</h1>", unsafe_allow_html=True)

# Sidebar for API key input and model selection
st.sidebar.title("To start chatting,")
user_api_key = st.sidebar.text_input("Enter your Google API key below", type="password", key="google_api_key_input")
if user_api_key:
    # Only override the pre-configured key when the user actually supplies one
    st.session_state['API_Key'] = user_api_key
# Support multiple models
st.sidebar.markdown("### Select Model:")
model_name = st.sidebar.selectbox(
    "Choose a model:",
    ["gemini-1.5-flash", "gemini-1.5-pro"],
    index=0
)
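# The selected model name is passed into getresponse() below. The two entries here
# are examples; the Gemini model IDs actually available depend on your API access
# and may change over time.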
# Add instructions for users
if 'welcome' not in st.session_state:
    st.session_state['welcome'] = True
if st.session_state['welcome']:
    st.sidebar.info(
        "### Instructions:\n"
        "1. Enter your Google API key (optional if pre-configured).\n"
        "2. Choose a model from the dropdown menu.\n"
        "3. Type your question in the text area and click 'Send'.\n"
        "4. Click 'Summarise the conversation' to view a summary of your chat."
    )
    st.session_state['welcome'] = False
# Summarization button
summarise_button = st.sidebar.button("Summarise the conversation", key="summarise")
if summarise_button:
    if st.session_state['conversation'] is not None:  # Check if conversation is initialized
        # Generate summary from conversation buffer
        summary = str(st.session_state['conversation'].memory.buffer)
        # Split summary into sentences
        summary_sentences = summary.strip().split(". ")
        # Exclude the first two sentences
        filtered_summary = summary_sentences[2:]
        # Display the summary at the center
        st.markdown("---")  # Separator line
        st.markdown("<h3 style='text-align: center;'>Summary of Conversation</h3>", unsafe_allow_html=True)
        summary_container = st.container()
        with summary_container:
            for i, line in enumerate(filtered_summary):
                if line:  # Avoid blank lines
                    message(line, is_user=False, key=f"summary_{i}")
    else:
        st.sidebar.write("No conversation history to summarize.")
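# ConversationSummaryMemory keeps a running natural-language summary of the chat
# in memory.buffer (updated after every exchange), which is what the button above
# reads and displays; no extra LLM call is made at summarize time.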
# Step 4: Define the getresponse function using Google's Gemini
def getresponse(userInput, api_key, model_name):
    try:
        if st.session_state['conversation'] is None:
            # Initialize the Google generative model
            with st.spinner("Setting up the conversation..."):
                chat = ChatGoogleGenerativeAI(
                    model=model_name,
                    google_api_key=api_key
                )
                st.session_state['conversation'] = ConversationChain(
                    llm=chat,
                    verbose=True,
                    memory=ConversationSummaryMemory(llm=chat)
                )
        # Get response with loading indicator
        with st.spinner("Generating response..."):
            response = st.session_state['conversation'].predict(input=userInput)
        return response
    except Exception as e:  # Generic exception handler
        st.error(f"Error: {str(e)}")
        return "Sorry, there was an issue processing your request."
# Step 5: Creating the Chat UI
response_container = st.container()
container = st.container()

with container:
    with st.form(key='my_form', clear_on_submit=True):
        user_input = st.text_area("Your question goes here:", key='input', height=100)
        submit_button = st.form_submit_button(label='Send')

    if submit_button:
        if user_input.strip():  # Check for empty input
            st.session_state['messages'].append(user_input)
            model_response = getresponse(user_input, st.session_state['API_Key'], model_name)
            st.session_state['messages'].append(model_response)
        else:
            st.warning("Please enter a message before sending.")
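# clear_on_submit=True empties the text area after each send; the submitted text is
# still available in user_input for the run that handles the submit.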
# Display chat messages
with response_container:
    for i in range(len(st.session_state['messages'])):
        if (i % 2) == 0:
            message(st.session_state['messages'][i], is_user=True, key=str(i) + '_user')
        else:
            message(st.session_state['messages'][i], key=str(i) + '_AI')
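# To run locally (assuming this file is saved as app.py): `streamlit run app.py`,
# with the API key stored in .streamlit/secrets.toml as: google_api_key = "YOUR_KEY"
# When deploying (e.g. to a Hugging Face Space), the key must be supplied the same
# way, or the st.secrets lookup adapted to that platform's secret mechanism.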