import streamlit as st
from streamlit_chat import message
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationSummaryMemory
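
# A minimal ChatGPT-style chat app: Streamlit for the UI, streamlit_chat for the
# message bubbles, and LangChain's ConversationChain backed by Google's Gemini
# models for responses. Run it with `streamlit run <your_script_name>.py`,
# substituting whatever filename this script is saved under.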

# Step 1: Set up the Google API key (fall back to an empty string if no secret is configured)
google_api_key = st.secrets.get("google_api_key", "")
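# The key above is read from .streamlit/secrets.toml locally (or from the app's
# Secrets settings on Streamlit Community Cloud). A minimal entry, with a
# placeholder value, looks like:
#   google_api_key = "YOUR_GOOGLE_API_KEY"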

# Step 2: Initialize Session State Variables
if 'conversation' not in st.session_state:
    st.session_state['conversation'] = None
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
if 'API_Key' not in st.session_state:
    st.session_state['API_Key'] = google_api_key  # Use the Google API key from secrets

# Step 3: Build the Streamlit UI
st.set_page_config(page_title="Chat GPT Clone", page_icon=":robot_face:")
st.markdown("<h1 style='text-align: center;'>How can I assist you? </h1>", unsafe_allow_html=True)

# Sidebar for API key input and model selection
st.sidebar.title("To start chatting,")
user_api_key = st.sidebar.text_input("Enter your Google API key below", type="password", key="google_api_key_input")
if user_api_key:
    # Only override the pre-configured key when the user actually enters one,
    # so the key loaded from st.secrets is not clobbered with an empty string.
    st.session_state['API_Key'] = user_api_key

# Support multiple models
st.sidebar.markdown("### Select Model:")
model_name = st.sidebar.selectbox(
    "Choose a model:",
    ["gemini-1.5-flash", "gemini-1.5-pro"],
    index=0
)

# Show usage instructions only on the very first script run; Streamlit reruns
# the whole script on each interaction, after which this info box is hidden.
if 'welcome' not in st.session_state:
    st.session_state['welcome'] = True

if st.session_state['welcome']:
    st.sidebar.info(
        "### Instructions:\n"
        "1. Enter your Google API key (optional if pre-configured).\n"
        "2. Choose a model from the dropdown menu.\n"
        "3. Type your question in the text area and click 'Send'.\n"
        "4. Click 'Summarise the conversation' to view a summary of your chat."
    )
    st.session_state['welcome'] = False

# Summarization button
summarise_button = st.sidebar.button("Summarise the conversation", key="summarise")

if summarise_button:
    if st.session_state['conversation'] is not None:  # Check if conversation is initialized
        # Read the running summary that ConversationSummaryMemory maintains in memory.buffer
        summary = str(st.session_state['conversation'].memory.buffer)

        # Split summary into sentences
        summary_sentences = summary.strip().split(". ")

        # Exclude the first two sentences
        filtered_summary = summary_sentences[2:]

        # Display the summary at the center
        st.markdown("---")  # Separator line
        st.markdown("<h3 style='text-align: center;'>Summary of Conversation</h3>", unsafe_allow_html=True)

        summary_container = st.container()
        with summary_container:
            for i, line in enumerate(filtered_summary):
                if line:  # Avoid blank lines
                    message(line, is_user=False, key=f"summary_{i}")
    else:
        st.sidebar.write("No conversation history to summarize.")

# Step 4: Define the get_response function using Google's Gemini
def get_response(user_input, api_key, model_name):
    """Build the ConversationChain on first use (kept in session state) and return the model's reply."""
    try:
        if st.session_state['conversation'] is None:
            # Initialize the Google generative model
            with st.spinner("Setting up the conversation..."):
                chat = ChatGoogleGenerativeAI(
                    model=model_name,
                    google_api_key=api_key
                )
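                # ConversationSummaryMemory keeps a rolling, LLM-generated summary
                # of the chat instead of the full transcript; the "Summarise the
                # conversation" button reads it back from memory.buffer.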
                st.session_state['conversation'] = ConversationChain(
                    llm=chat,
                    verbose=True,
                    memory=ConversationSummaryMemory(llm=chat)
                )

        # Get response with loading indicator
        with st.spinner("Generating response..."):
            response = st.session_state['conversation'].predict(input=user_input)
        return response

    except Exception as e:  # Generic exception handler
        st.error(f"Error: {str(e)}")
        return "Sorry, there was an issue processing your request."

# Step 5: Create the chat UI
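# The containers are created in this order so the chat history (written into
# response_container at the bottom of the script) renders above the input form.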
response_container = st.container()
container = st.container()

with container:
    with st.form(key='my_form', clear_on_submit=True):
        user_input = st.text_area("Your question goes here:", key='input', height=100)
        submit_button = st.form_submit_button(label='Send')
        if submit_button:
            if user_input.strip():  # Check for empty input
                st.session_state['messages'].append(user_input)
                model_response = get_response(user_input, st.session_state['API_Key'], model_name)
                st.session_state['messages'].append(model_response)
            else:
                st.warning("Please enter a message before sending.")

# Display chat messages
with response_container:
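    # Messages are appended in user/model pairs, so even indices are the user's
    # turns and odd indices are the model's replies.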
    for i in range(len(st.session_state['messages'])):
        if (i % 2) == 0:
            message(st.session_state['messages'][i], is_user=True, key=str(i) + '_user')
        else:
            message(st.session_state['messages'][i], key=str(i) + '_AI')