|
import gradio as gr |
|
from huggingface_hub import InferenceClient |
|
|
|
|
|
custom_css = """ |
|
#chat-container { |
|
background-color: #0a0a1a; |
|
min-height: 100vh; |
|
color: white; |
|
} |
|
|
|
#header { |
|
background-color: #0a0a1a; |
|
padding: 1rem 2rem; |
|
border-bottom: 1px solid #2a2a3a; |
|
} |
|
|
|
#logo { |
|
display: flex; |
|
align-items: center; |
|
gap: 0.5rem; |
|
} |
|
|
|
#nav { |
|
display: flex; |
|
justify-content: space-between; |
|
align-items: center; |
|
} |
|
|
|
#nav-links { |
|
display: flex; |
|
gap: 2rem; |
|
} |
|
|
|
.nav-link { |
|
color: #ffffff; |
|
text-decoration: none; |
|
padding: 0.5rem 1rem; |
|
} |
|
|
|
#main-content { |
|
max-width: 800px; |
|
margin: 2rem auto; |
|
padding: 0 1rem; |
|
} |
|
|
|
#chat-title { |
|
font-size: 2.5rem; |
|
font-weight: bold; |
|
text-align: center; |
|
margin-bottom: 1rem; |
|
} |
|
|
|
#chat-description { |
|
text-align: center; |
|
color: #cccccc; |
|
margin-bottom: 2rem; |
|
} |
|
|
|
.chatbot-container { |
|
background-color: #13131f; |
|
border-radius: 0.5rem; |
|
padding: 1rem; |
|
} |
|
|
|
.message { |
|
background-color: #1a1a2a; |
|
border-radius: 0.5rem; |
|
padding: 1rem; |
|
margin-bottom: 1rem; |
|
} |
|
|
|
.chat-input { |
|
background-color: #1a1a2a; |
|
border: 1px solid #2a2a3a; |
|
border-radius: 0.5rem; |
|
color: white; |
|
} |
|
""" |
|
|
|
# Single module-level client for the hosted zephyr-7b-beta chat model,
# created once at import time and shared by every respond() call.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
|
|
|
def respond(
    message,
    chat_history,
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Generate one assistant reply via the Inference API and update the chat.

    Args:
        message: The user's new message text.
        chat_history: List of (user, assistant) tuples shown in the Chatbot.
        system_message: System prompt prepended to every request.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.

    Returns:
        A ("", updated_history) pair — the empty string clears the input
        textbox, and the history includes the new exchange (or an error
        message if the API call failed).
    """
    # Ignore empty / whitespace-only submissions; just clear the textbox.
    if not message.strip():
        return "", chat_history

    # Rebuild the OpenAI-style message list from the (user, assistant) tuples.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in chat_history:
        messages.append({"role": "user", "content": user_turn})
        # A pending turn may have no assistant reply yet.
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    try:
        response = ""
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            # Some stream chunks (e.g. the final one) carry no content delta.
            token = getattr(chunk.choices[0].delta, "content", None)
            if token is not None:
                response += token

        chat_history.append((message, response))
        return "", chat_history
    except Exception as e:
        # Surface the failure in the chat instead of crashing the UI.
        return "", chat_history + [(message, f"Error: {e}")]
|
|
|
# Build the page: dark header with nav links, then a centered chat column.
# `demo` must stay at module level so hosting platforms can discover it.
with gr.Blocks(css=custom_css) as demo:
    with gr.Column(elem_id="chat-container"):
        # --- Top navigation bar -------------------------------------------
        with gr.Row(elem_id="header"):
            with gr.Column(elem_id="logo"):
                gr.Markdown("π ZiF-V0")
            with gr.Row(elem_id="nav-links"):
                gr.Markdown("[AI Chat](#) [AI Image Generator](#) [AI Video](#) [AI Music Generator](#) [Login](#)")

        # --- Main column: title, blurb, chat widgets, settings ------------
        with gr.Column(elem_id="main-content"):
            gr.Markdown("# AI Chat", elem_id="chat-title")
            gr.Markdown(
                "AI Chat is an AI chatbot that writes text. You can use it to write stories, messages, or "
                "programming code. You can use the AI chatbot as a virtual tutor in almost any subject.",
                elem_id="chat-description",
            )

            chatbot = gr.Chatbot(elem_classes="chatbot-container")
            msg = gr.Textbox(show_label=False, placeholder="Chat with AI...", elem_classes="chat-input")

            # Generation controls, hidden behind a collapsed accordion.
            with gr.Accordion("Advanced Settings", open=False):
                system_message = gr.Textbox(label="System message", value="You are a friendly Chatbot.")
                max_tokens = gr.Slider(label="Max new tokens", minimum=1, maximum=2048, step=1, value=512)
                temperature = gr.Slider(label="Temperature", minimum=0.1, maximum=4.0, step=0.1, value=0.7)
                top_p = gr.Slider(label="Top-p (nucleus sampling)", minimum=0.1, maximum=1.0, step=0.05, value=0.95)

            # Pressing Enter in the textbox triggers a model call; respond()
            # returns ("", history) so the textbox clears and the chat updates.
            msg.submit(
                respond,
                inputs=[msg, chatbot, system_message, max_tokens, temperature, top_p],
                outputs=[msg, chatbot],
            )

            # Clear wipes the Chatbot display (returns None as its new value).
            clear = gr.Button("Clear")
            clear.click(lambda: None, None, chatbot, queue=False)
|
if __name__ == "__main__":
    # Start the Gradio server only when executed as a script (not on import).
    demo.launch()