facat committed
Commit 9be8f93 · Parent(s): e3eb48d

switch to 7b

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -81,10 +81,10 @@ def load_lora(lora_path, base_model="decapoda-research/llama-7b-hf"):
     return lora
 
 
-base_model = "decapoda-research/llama-13b-hf"
+base_model = "decapoda-research/llama-7b-hf"
 tokenizer = LlamaTokenizer.from_pretrained(base_model)
 # question = "如果今天是星期五, 那么后天是星期几?"
-model = load_lora(lora_path="facat/alpaca-lora-cn-13b", base_model=base_model)
+model = load_lora(lora_path="facat/alpaca-lora-cn", base_model=base_model)
 
 eval = lambda question, input, temperature, beams, max_token: evaluate(
     model,
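
For context, a minimal sketch of what a load_lora helper with the signature shown in the hunk header might look like, assuming the peft and transformers libraries (PeftModel, LlamaForCausalLM); the actual body of load_lora is not included in this diff, so treat this as an illustration only.

# Hypothetical sketch, not the app.py implementation from this commit.
# Assumes peft and transformers are installed and a GPU is available.
import torch
from peft import PeftModel
from transformers import LlamaForCausalLM

def load_lora(lora_path, base_model="decapoda-research/llama-7b-hf"):
    # Load the base LLaMA checkpoint, then attach the LoRA adapter on top of it.
    model = LlamaForCausalLM.from_pretrained(
        base_model,
        torch_dtype=torch.float16,
        device_map="auto",
    )
    lora = PeftModel.from_pretrained(model, lora_path, torch_dtype=torch.float16)
    lora.eval()
    return lora

Note that the commit changes base_model and lora_path together, which keeps the LoRA adapter paired with the base checkpoint it was trained against.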