|
import streamlit as st |
|
from streamlit.components.v1 import html |
|
import os |
|
from groq import Groq |
|
|
|
|
|
# Groq API client used by get_groq_response() below.
# NOTE(review): os.environ.get returns None when GROQ_API_KEY is unset; the
# failure then only surfaces on the first request — consider failing fast here.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))




# Page headline shown at the top of the Streamlit app.
st.title("AI Chatbot with Groq LLaMA Model")
|
|
|
|
|
# Page-wide CSS for the app: base colors/typography, spacing for the input
# and button widgets, the white "custom-box" card, and an error style.
css = """
<style>
body {
    background-color: #f0f2f6;
    font-family: 'Arial', sans-serif;
}
.stTextInput, .stButton {
    margin-top: 20px;
}
.custom-box {
    padding: 20px;
    background-color: #ffffff;
    border-radius: 8px;
    box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.1);
}
.error-msg {
    color: red;
    font-weight: bold;
}
</style>
"""

# Inject via st.markdown, NOT streamlit.components.v1.html: components.html
# renders its content inside a sandboxed iframe, so a <style> block placed
# there never reaches the main document and the CSS silently has no effect.
st.markdown(css, unsafe_allow_html=True)
|
|
|
# Intro card rendered with the .custom-box style defined in the CSS above.
# NOTE(review): the trailing "π" in the subtitle looks like a mis-encoded
# emoji (mojibake) rather than intentional text — confirm the intended glyph.
st.markdown(

    """

    <div class='custom-box'>

    <h2>Ask anything about Data Science</h2>

    <p>Powered by Groq LLaMA 3.1 Model π</p>

    </div>

    """, unsafe_allow_html=True

)




# Free-form question box; its value is consumed by the Submit handler below.
user_input = st.text_input("Ask your question here:")
|
|
|
|
|
def get_groq_response(query):
    """Send *query* to the Groq chat API and return the model's reply text.

    Errors are converted into a human-readable string instead of being
    raised, so the UI can simply display whatever comes back.
    """
    messages = [{"role": "user", "content": query}]
    try:
        completion = client.chat.completions.create(
            messages=messages,
            model="llama-3.1-70b-versatile",
        )
    except Exception as exc:
        # Same best-effort contract as before: report, never crash the app.
        return f"An error occurred: {str(exc)}"
    return completion.choices[0].message.content
|
|
|
|
|
# Streamlit reruns this whole script on every widget interaction, so the
# answer is kept in st.session_state. The original bound a local `response`
# only inside the Submit branch and then read it unconditionally below,
# which raised a NameError on every run where Submit was not clicked
# (including the very first page load).
if st.button("Submit"):
    if user_input:
        with st.spinner("Fetching response..."):
            st.session_state["response"] = get_groq_response(user_input)
        st.success(st.session_state["response"])
    else:
        st.error("Please enter a question!")


# Safe lookup: returns None until the first successful submit.
response = st.session_state.get("response")
if response:
    # st.code ships its own built-in copy-to-clipboard control, so the old
    # extra "Copy to clipboard" button is dropped: its on_click was
    # st.experimental_rerun (deprecated and removed in modern Streamlit),
    # which only reran the script and never copied anything.
    st.code(response)
|
|