ffreemt committed
Commit · 5fb6cc5
Parent: 3cae1b6

Update generate function

Files changed:
- app.py      +4 -6
- run-app.sh   +1 -1
app.py CHANGED

@@ -129,11 +129,10 @@ def generate(
     generation_config: GenerationConfig = GenerationConfig(),
 ):
     """Run model inference, will return a Generator if streaming is true."""
-    #
-    _ = prompt_template.format(question=question)
-    print(_)
+    # _ = prompt_template.format(question=question)
+    # print(_)
     return llm(
-        _,
+        question,
         **asdict(generation_config),
     )

@@ -341,8 +340,7 @@ with gr.Blocks(
     # """<center><a href="https://huggingface.co/spaces/mikeee/mpt-30b-chat?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate"></a> and spin a CPU UPGRADE to avoid the queue</center>"""
     # )
     gr.Markdown(
-        f"""<h5><center
-        The bot only speaks English.
+        f"""<h5><center>{Path(model_loc).name}</center></h4>

     Most examples are meant for another model.
     You probably should try to test
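For context, here is a minimal, self-contained sketch of the pattern this commit moves to: the raw question string is passed straight to the model callable, and the generation settings travel in a dataclass that asdict() unpacks into keyword arguments. The llm callable and the GenerationConfig fields below (temperature, max_new_tokens, stream) are stand-ins for illustration, not the Space's actual model wrapper or config.

# Sketch only: `llm` is a dummy callable standing in for the real model,
# and the GenerationConfig fields are illustrative assumptions.
from dataclasses import asdict, dataclass
from typing import Generator, Union


@dataclass
class GenerationConfig:
    temperature: float = 0.8
    max_new_tokens: int = 256
    stream: bool = True  # when True the callable returns a generator of tokens


def llm(prompt: str, **kwargs) -> Union[str, Generator[str, None, None]]:
    """Dummy model callable; the real app wraps a loaded model here."""
    if kwargs.get("stream"):
        return (piece for piece in ("echo: ", prompt))
    return "echo: " + prompt


def generate(
    question: str,
    generation_config: GenerationConfig = GenerationConfig(),
):
    """Run model inference, will return a Generator if streaming is true."""
    # After this commit the question is sent to the model as-is; the old
    # prompt_template formatting step is commented out in app.py.
    return llm(
        question,
        **asdict(generation_config),
    )


if __name__ == "__main__":
    for piece in generate("Hello there"):
        print(piece, end="")
    print()

In the second hunk, Path(model_loc).name trims the model path down to its final component for the page heading; for example, Path("models/mpt-30b.ggmlv0.bin").name evaluates to "mpt-30b.ggmlv0.bin" (the path here is made up for illustration).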
run-app.sh CHANGED

@@ -1 +1 @@
-nodemon -w . -x python app.py
+nodemon -w app.py -x python app.py
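Narrowing nodemon's watch target from the whole directory (-w .) to just app.py means the command given to -x (python app.py) is restarted only when app.py itself changes, presumably so that other files written into the working directory (downloaded model weights, logs, and the like) no longer trigger a restart.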