Create app.py
app.py
ADDED
import gradio as gr
import openai
import os
import json

# OpenAI API setup
openai.api_key = os.getenv("GROQ_API_KEY")
openai.api_base = "https://api.groq.com/openai/v1"
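# Note: openai.api_key / openai.api_base and openai.ChatCompletion (used below) follow the
# pre-1.0 "openai" SDK interface; openai>=1.0 would need the OpenAI(base_url=...) client instead.
# Assumes GROQ_API_KEY is set in the environment (e.g. as a Space secret).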

# File to store conversation history
CONVERSATION_FILE = "conversation_history.json"
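# Assumption: this JSON file lives on local disk, so on a Space without persistent
# storage the saved history is lost whenever the Space restarts.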

# Function to load conversation history
def load_history():
    if not os.path.exists(CONVERSATION_FILE):
        # Create the file with an empty list as default content
        with open(CONVERSATION_FILE, "w") as file:
            json.dump([], file)
    try:
        with open(CONVERSATION_FILE, "r") as file:
            return json.load(file)
    except json.JSONDecodeError:
        return []

# Function to save conversation history
def save_history(history):
    try:
        with open(CONVERSATION_FILE, "w") as file:
            json.dump(history, file, indent=4)
    except Exception as e:
        print(f"Error saving history: {e}")

# Function to clear conversation history
def clear_conversation_history():
    try:
        with open(CONVERSATION_FILE, "w") as file:
            json.dump([], file)
        return "Conversation history cleared successfully.", ""
    except Exception as e:
        return f"Error clearing history: {e}", ""

# Function to get response from the LLM
def get_groq_response(message, history=None):
    history = history or []  # avoid a mutable default argument
    try:
        messages = [{"role": "system", "content": "Precise answer"}] + history + [{"role": "user", "content": message}]
        response = openai.ChatCompletion.create(
            model="llama-3.1-70b-versatile",
            messages=messages
        )
        return response.choices[0].message["content"]
    except Exception as e:
        return f"Error: {str(e)}"

# Chatbot function
def chatbot(user_input, history):
    # Load conversation history
    conversation_history = history or load_history()
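    # history is a list of (user_message, bot_response) pairs (plain lists once round-tripped through JSON)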

    # Format history for the LLM as alternating user/assistant messages
    formatted_history = []
    for user_msg, bot_msg in conversation_history:
        formatted_history.append({"role": "user", "content": user_msg})
        formatted_history.append({"role": "assistant", "content": bot_msg})

    # Get bot response
    bot_response = get_groq_response(user_input, formatted_history)

    # Update history with the new conversation
    conversation_history.append((user_input, bot_response))

    # Save the updated history
    save_history(conversation_history)

    # Format for HTML display
    display_html = "<br>".join(
        f"<div><b>User:</b> {user}</div><div><b>Bot:</b> {bot}</div><br>"
        for user, bot in conversation_history
    )

    return conversation_history, display_html, ""  # Clear the user input field

# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("# Chatbot with Enhanced Formatting and Selectable Chat History")

    chat_display = gr.HTML(label="Conversation")
    user_input = gr.Textbox(label="Type your message here:")
    clear_button = gr.Button("Clear History")
    system_message = gr.Textbox(label="System Message", interactive=False)

    history_state = gr.State(load_history())
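    # gr.State keeps the running (user, bot) history per browser session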

    # Chat interaction
    user_input.submit(chatbot, inputs=[user_input, history_state], outputs=[history_state, chat_display, user_input])

    # Clear history button action
    clear_button.click(clear_conversation_history, inputs=None, outputs=[system_message, chat_display])
    clear_button.click(lambda: [], outputs=history_state)  # Reset the history state
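    # Both .click listeners are registered on the same button, so a single press clears
    # the saved file, the display, and the in-memory state.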

# Launch the app
demo.launch()