import gradio as gr
from groq import Groq
# Initialize Groq client
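# The client reads its API key from the GROQ_API_KEY environment variable by default.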
client = Groq()
# Function to handle the chat completion
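# The completion is streamed from the API; the chunks are joined and returned as one string.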
def chat_with_bot(user_message):
    completion = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=[
            {
                "role": "user",
                "content": user_message
            }
        ],
        temperature=1,
        max_tokens=1024,
        top_p=1,
        stream=True,
        stop=None,
    )
    # Collect the response
    response = ""
    for chunk in completion:
        response += chunk.choices[0].delta.content or ""
    return response

# Set up Gradio interface
iface = gr.Interface(
    fn=chat_with_bot,
    inputs="text",
    outputs="text",
    title="AI Chatbot",
    description="Chat with Llama 3.1 70B",
)

# Launch the app
iface.launch()