Update app.py
app.py CHANGED
@@ -48,7 +48,7 @@ def respond(
 
     # Format the conversation as a single string for the model
     prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
-    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, padding=True, max_length=
+    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, padding=True, max_length=512)
 
     # Move inputs to device
     input_ids = inputs['input_ids'].to(device)
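For context on what the re-enabled line does: tokenizer() with truncation=True, padding=True, max_length=512 caps the flattened chat prompt at 512 tokens before it is moved to the device. A minimal, self-contained sketch of that pattern follows; the model id, the example messages, and the generate/decode steps are illustrative assumptions, since the Space's actual model and the rest of respond() are not shown in this diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Placeholder model id with a chat template and pad token; the Space's real model is not shown in this diff.
model_id = "Qwen/Qwen2.5-0.5B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

# Illustrative conversation; in the Space this would come from the system message, history, and user message.
messages = [
    {"role": "system", "content": "You are a Medical AI Assistant."},
    {"role": "user", "content": "What can cause a persistent cough?"},
]

# Same pattern as the changed line: flatten the chat, then truncate/pad to 512 tokens.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
inputs = tokenizer(prompt, return_tensors="pt", truncation=True, padding=True, max_length=512)
input_ids = inputs["input_ids"].to(device)
attention_mask = inputs["attention_mask"].to(device)

# Generate, then decode only the newly produced tokens rather than the echoed prompt.
output_ids = model.generate(input_ids, attention_mask=attention_mask, max_new_tokens=128)
reply = tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True)
print(reply)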
@@ -78,10 +78,10 @@ def respond(
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(value="You are a Medical AI Assistant. Please be thorough and provide an informative answer. If you don't know the answer to a specific medical inquiry, advise seeking professional help.", label="System message", lines=3),
+        gr.Textbox(value="You are a Medical AI Assistant. You should help to answer the health issue. Please be thorough and provide an informative answer. If you don't know the answer to a specific medical inquiry, advise seeking professional help.", label="System message", lines=3),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(minimum=0.1, maximum=1.0, value=0.
+        gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.05, label="Top-p (nucleus sampling)"),
     ],
     title="Medical AI Assistant",
     description="Give me your symptoms and ask me a health problem. The AI will provide informative answers. If the AI doesn't know the answer, it will advise seeking professional help.",
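For context on how these controls reach respond(): gr.ChatInterface passes each widget in additional_inputs, in order, as an extra positional argument after the user message and the chat history. The sketch below wires the same widgets to an echo-only respond(); the parameter names (system_message, max_tokens, temperature, top_p) and the placeholder body are assumptions, since the function's full signature is not shown in this diff.

import gradio as gr

# additional_inputs are appended, in order, after (message, history).
def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Placeholder body: a real app would build a prompt from system_message,
    # history, and message, then sample with the chosen decoding settings.
    return f"(temp={temperature}, top_p={top_p}, max {max_tokens} tokens) You said: {message}"

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a Medical AI Assistant.", label="System message", lines=3),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.05, label="Top-p (nucleus sampling)"),
    ],
    title="Medical AI Assistant",
)

if __name__ == "__main__":
    demo.launch()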