Spaces:
Runtime error
Runtime error
ffreemt
committed on
Commit
•
442b638
1
Parent(s):
020f552
Update prompt_template
Browse files- app.py +15 -39
- examples_list.py +1 -1
app.py
CHANGED
@@ -1,13 +1,13 @@
|
|
1 |
"""Run codes."""
|
2 |
# pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
|
3 |
# ruff: noqa: E501
|
|
|
4 |
import os
|
5 |
import platform
|
6 |
import random
|
7 |
import time
|
8 |
from dataclasses import asdict, dataclass, field
|
9 |
from pathlib import Path
|
10 |
-
from textwrap import dedent
|
11 |
|
12 |
# from types import SimpleNamespace
|
13 |
import gradio as gr
|
@@ -22,29 +22,12 @@ from examples_list import examples_list
|
|
22 |
url = "https://huggingface.co/TheBloke/WizardLM-1.0-Uncensored-Llama2-13B-GGML/blob/main/wizardlm-1.0-uncensored-llama2-13b.ggmlv3.q4_K_S.bin" # 7.37G, Max RAM required 9.87 GB
|
23 |
|
24 |
LLM = None
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
try:
|
32 |
-
model_loc, file_size = dl_hf_model(url)
|
33 |
-
logger.info(f"done load llm {model_loc=} {file_size=}G")
|
34 |
-
except Exception as exc_:
|
35 |
-
logger.error(exc_)
|
36 |
-
raise SystemExit(1) from exc_
|
37 |
-
# """
|
38 |
-
model_loc = "models/stablebeluga2-70b.ggmlv3.q3_K_S.bin"
|
39 |
-
assert Path(model_loc).exists(), f"Make sure {model_loc=} exists."
|
40 |
-
else:
|
41 |
-
try:
|
42 |
-
logger.debug(f" dl {url}")
|
43 |
-
model_loc, file_size = dl_hf_model(url)
|
44 |
-
logger.info(f"done load llm {model_loc=} {file_size=}G")
|
45 |
-
except Exception as exc_:
|
46 |
-
logger.error(exc_)
|
47 |
-
raise SystemExit(1) from exc_
|
48 |
|
49 |
# raise SystemExit(0)
|
50 |
|
@@ -58,21 +41,14 @@ human_prefix = "### Human"
|
|
58 |
ai_prefix = "### Assistant"
|
59 |
stop_list = [f"{human_prefix}:"]
|
60 |
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
### Assistant:
|
71 |
-
"""
|
72 |
-
).lstrip()
|
73 |
-
human_prefix = "### User"
|
74 |
-
ai_prefix = "### Assistant"
|
75 |
-
stop_list = [f"{human_prefix}:"]
|
76 |
|
77 |
_ = psutil.cpu_count(logical=False) - 1
|
78 |
cpu_count: int = int(_) if _ else 1
|
|
|
1 |
"""Run codes."""
|
2 |
# pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
|
3 |
# ruff: noqa: E501
|
4 |
+
import gc
|
5 |
import os
|
6 |
import platform
|
7 |
import random
|
8 |
import time
|
9 |
from dataclasses import asdict, dataclass, field
|
10 |
from pathlib import Path
|
|
|
11 |
|
12 |
# from types import SimpleNamespace
|
13 |
import gradio as gr
|
|
|
22 |
url = "https://huggingface.co/TheBloke/WizardLM-1.0-Uncensored-Llama2-13B-GGML/blob/main/wizardlm-1.0-uncensored-llama2-13b.ggmlv3.q4_K_S.bin" # 7.37G, Max RAM required 9.87 GB
|
23 |
|
24 |
LLM = None
|
25 |
+
gc.collect()
|
26 |
+
try:
|
27 |
+
model_loc, file_size = dl_hf_model(url)
|
28 |
+
except Exception as exc_:
|
29 |
+
logger.error(exc_)
|
30 |
+
raise SystemExit(1) from exc_
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
31 |
|
32 |
# raise SystemExit(0)
|
33 |
|
|
|
41 |
ai_prefix = "### Assistant"
|
42 |
stop_list = [f"{human_prefix}:"]
|
43 |
|
44 |
+
# Prompt template: WizardLM-Vicuna
|
45 |
+
prompt_template = """You are a helpful AI assistant.
|
46 |
+
|
47 |
+
USER: {question}
|
48 |
+
ASSISTANT: """
|
49 |
+
human_prefix = "USER"
|
50 |
+
ai_prefix = "ASSISTANT"
|
51 |
+
stop_list = [f"{human_prefix}:"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
52 |
|
53 |
_ = psutil.cpu_count(logical=False) - 1
|
54 |
cpu_count: int = int(_) if _ else 1
|
examples_list.py
CHANGED
@@ -9,7 +9,7 @@ examples_list = [
|
|
9 |
"If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth?"
|
10 |
],
|
11 |
[
|
12 |
-
"If it takes 10 hours to dry 10 clothes,
|
13 |
],
|
14 |
["is infinity + 1 bigger than infinity?"],
|
15 |
["Explain the plot of Cinderella in a sentence."],
|
|
|
9 |
"If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth?"
|
10 |
],
|
11 |
[
|
12 |
+
"If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying and there is only space to hang 10 clothes, then how long will it take to dry 23 clothes? Think step by step."
|
13 |
],
|
14 |
["is infinity + 1 bigger than infinity?"],
|
15 |
["Explain the plot of Cinderella in a sentence."],
|