ffreemt commited on
Commit
442b638
1 Parent(s): 020f552

Update prompt_template

Browse files
Files changed (2) hide show
  1. app.py +15 -39
  2. examples_list.py +1 -1
app.py CHANGED
@@ -1,13 +1,13 @@
1
  """Run codes."""
2
  # pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
3
  # ruff: noqa: E501
 
4
  import os
5
  import platform
6
  import random
7
  import time
8
  from dataclasses import asdict, dataclass, field
9
  from pathlib import Path
10
- from textwrap import dedent
11
 
12
  # from types import SimpleNamespace
13
  import gradio as gr
@@ -22,29 +22,12 @@ from examples_list import examples_list
22
  url = "https://huggingface.co/TheBloke/WizardLM-1.0-Uncensored-Llama2-13B-GGML/blob/main/wizardlm-1.0-uncensored-llama2-13b.ggmlv3.q4_K_S.bin" # 7.37G, Max RAM required 9.87 GB
23
 
24
  LLM = None
25
-
26
- if "forindo" in platform.node(): # deploy 70b model locally
27
- # url = "https://huggingface.co/TheBloke/llama-2-70b-Guanaco-QLoRA-GGML/blob/main/llama-2-70b-guanaco-qlora.ggmlv3.q3_K_S.bin" # 29.7G
28
- # model_loc = "/home/mu2018/github/langchain-llama-2-70b-guanaco-qlora-ggml/models/llama-2-70b-guanaco-qlora.ggmlv3.q3_K_S.bin"
29
- _ = """
30
- url = "https://huggingface.co/TheBloke/StableBeluga2-70B-GGML/blob/main/stablebeluga2-70b.ggmlv3.q3_K_S.bin"
31
- try:
32
- model_loc, file_size = dl_hf_model(url)
33
- logger.info(f"done load llm {model_loc=} {file_size=}G")
34
- except Exception as exc_:
35
- logger.error(exc_)
36
- raise SystemExit(1) from exc_
37
- # """
38
- model_loc = "models/stablebeluga2-70b.ggmlv3.q3_K_S.bin"
39
- assert Path(model_loc).exists(), f"Make sure {model_loc=} exists."
40
- else:
41
- try:
42
- logger.debug(f" dl {url}")
43
- model_loc, file_size = dl_hf_model(url)
44
- logger.info(f"done load llm {model_loc=} {file_size=}G")
45
- except Exception as exc_:
46
- logger.error(exc_)
47
- raise SystemExit(1) from exc_
48
 
49
  # raise SystemExit(0)
50
 
@@ -58,21 +41,14 @@ human_prefix = "### Human"
58
  ai_prefix = "### Assistant"
59
  stop_list = [f"{human_prefix}:"]
60
 
61
- if "beluga" in model_loc.lower():
62
- prompt_template = dedent(
63
- """
64
- ### System:
65
- You are Stable Beluga, an AI that follows instructions extremely well. Help as much as you can.
66
- Let's think step by step.
67
-
68
- ### User: {question}
69
-
70
- ### Assistant:
71
- """
72
- ).lstrip()
73
- human_prefix = "### User"
74
- ai_prefix = "### Assistant"
75
- stop_list = [f"{human_prefix}:"]
76
 
77
  _ = psutil.cpu_count(logical=False) - 1
78
  cpu_count: int = int(_) if _ else 1
 
1
  """Run codes."""
2
  # pylint: disable=line-too-long, broad-exception-caught, invalid-name, missing-function-docstring, too-many-instance-attributes, missing-class-docstring
3
  # ruff: noqa: E501
4
+ import gc
5
  import os
6
  import platform
7
  import random
8
  import time
9
  from dataclasses import asdict, dataclass, field
10
  from pathlib import Path
 
11
 
12
  # from types import SimpleNamespace
13
  import gradio as gr
 
22
  url = "https://huggingface.co/TheBloke/WizardLM-1.0-Uncensored-Llama2-13B-GGML/blob/main/wizardlm-1.0-uncensored-llama2-13b.ggmlv3.q4_K_S.bin" # 7.37G, Max RAM required 9.87 GB
23
 
24
  LLM = None
25
+ gc.collect()
26
+ try:
27
+ model_loc, file_size = dl_hf_model(url)
28
+ except Exception as exc_:
29
+ logger.error(exc_)
30
+ raise SystemExit(1) from exc_
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
 
32
  # raise SystemExit(0)
33
 
 
41
  ai_prefix = "### Assistant"
42
  stop_list = [f"{human_prefix}:"]
43
 
44
+ # Prompt template: WizardLM-Vicuna
45
+ prompt_template = """You are a helpful AI assistant.
46
+
47
+ USER: {question}
48
+ ASSISTANT: """
49
+ human_prefix = "USER"
50
+ ai_prefix = "ASSISTANT"
51
+ stop_list = [f"{human_prefix}:"]
 
 
 
 
 
 
 
52
 
53
  _ = psutil.cpu_count(logical=False) - 1
54
  cpu_count: int = int(_) if _ else 1
examples_list.py CHANGED
@@ -9,7 +9,7 @@ examples_list = [
9
  "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth?"
10
  ],
11
  [
12
- "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth? Think step by step."
13
  ],
14
  ["is infinity + 1 bigger than infinity?"],
15
  ["Explain the plot of Cinderella in a sentence."],
 
9
  "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying , then how long will it take to dry a cloth?"
10
  ],
11
  [
12
+ "If it takes 10 hours to dry 10 clothes, assuming all the clothes are hung together at the same time for drying and there is only space to hang 10 clothes, then how long will it take to dry 23 clothes? Think step by step."
13
  ],
14
  ["is infinity + 1 bigger than infinity?"],
15
  ["Explain the plot of Cinderella in a sentence."],