import streamlit as st

from models import demo
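
# `demo` is the inference entry point exposed by the local models module. It is
# assumed here to take a payload dict (messages, system prompt, sampling options)
# and to return an OpenAI-style dict with a "choices" list, which chatbot() unpacks below.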

st.set_page_config(
    page_title="DeepSeek Chatbot",
    page_icon="🤖",
    layout="wide"
)

st.title("DeepSeek Chatbot")
st.markdown("""
Created by [ruslanmv.com](https://ruslanmv.com/)

This is a demo of different DeepSeek models. Select a model, type your message, and press Enter to send it.
You can also adjust optional parameters like system message, max new tokens, temperature, and top-p.
""")
with st.sidebar:
    st.header("Options")
    model_choice = st.radio(
        "Choose a Model",
        options=["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"],
        index=1
    )
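
    # System prompt and sampling settings, collapsed by default.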
    with st.expander("Optional Parameters", expanded=False):
        system_message = st.text_area(
            "System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            height=100
        )
        max_new_tokens = st.slider(
            "Max New Tokens",
            min_value=1,
            max_value=4000,
            value=200
        )
        temperature = st.slider(
            "Temperature",
            min_value=0.10,
            max_value=4.00,
            value=0.70
        )
        top_p = st.slider(
            "Top-p (nucleus sampling)",
            min_value=0.10,
            max_value=1.00,
            value=0.90
        )


def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
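    """Send one user turn to the model and append the exchange to history.

    Builds a request payload from the UI settings and calls models.demo.
    Note: model_choice is accepted but not added to the payload; if
    models.demo needs it (for example under a "model" key), that field
    would have to be added, since the exact payload schema it expects is
    assumed rather than documented here.
    """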
    payload = {
        "messages": [{"role": "user", "content": input_text}],
        "system": system_message,
        "max_tokens": max_new_tokens,
        "temperature": temperature,
        "top_p": top_p
    }

    try:
        response = demo(payload)
        if isinstance(response, dict) and "choices" in response:
            assistant_response = response["choices"][0]["message"]["content"]
        else:
            assistant_response = "Unexpected model response format."
    except Exception as e:
        assistant_response = f"Error: {str(e)}"

    history.append((input_text, assistant_response))
    return history
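

# Chat history is kept in st.session_state so it survives Streamlit reruns.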
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

st.header("Chat with DeepSeek")
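
# Replay the stored conversation so earlier turns remain visible after each rerun.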
for user_msg, assistant_msg in st.session_state.chat_history:
    with st.chat_message("user"):
        st.write(user_msg)
    with st.chat_message("assistant"):
        st.write(assistant_msg)

input_text = st.chat_input("Type your message here...")
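
# When the user submits a message, run it through the model, store the new
# exchange, and rerun the script so the refreshed history renders above.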
if input_text:
    st.session_state.chat_history = chatbot(
        input_text,
        st.session_state.chat_history,
        model_choice,
        system_message,
        max_new_tokens,
        temperature,
        top_p
    )
    st.rerun()