import os
import gradio as gr
import google.generativeai as genai
# Configure the Gemini API
api_key = os.environ.get("GEMINI_API_KEY")
if not api_key:
    raise ValueError("GEMINI_API_KEY not found in environment variables. Please set it in Hugging Face Spaces secrets.")
genai.configure(api_key=api_key)
# Create the model
model = genai.GenerativeModel('gemini-pro')
# Initialize chat history
chat = model.start_chat(history=[])
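# Note: this chat session is created once at import time, so its history is
# shared by every user of the Space; the Gradio `history` list below only
# tracks what is shown in the UI.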
def respond(message, history):
    history = history or []
    try:
        # Send the user message to the chat session and get the response
        response = chat.send_message(message)
        bot_message = response.text
        # Append the exchange to the history and return it for display
        history.append((message, bot_message))
        return history
    except Exception as e:
        error_message = f"An error occurred: {e}"
        history.append((message, error_message))
        return history
# Define the Gradio interface
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat with Gemini")
    msg = gr.Textbox(label="Type your message here")
    clear = gr.Button("Clear")

    msg.submit(respond, [msg, chatbot], [chatbot])
    clear.click(lambda: None, outputs=[chatbot])
# Launch the app
if __name__ == "__main__":
    demo.launch()