ducknew committed on
Commit
b8f4d75
·
1 Parent(s): 7732e76

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -6
app.py CHANGED
@@ -19,9 +19,6 @@ from langchain.prompts import PromptTemplate
19
  from langchain.prompts.prompt import PromptTemplate
20
  from langchain.chat_models import ChatOpenAI
21
 
22
- LOAD_MODEL=False
23
- tokenizer,model = None,None
24
-
25
  def load_model():
26
  tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
27
  model = AutoModel.from_pretrained("THUDM/chatglm-6b-int4", trust_remote_code=True).quantize(bits=4, compile_parallel_kernel=True, parallel_num=2).float()
@@ -32,9 +29,8 @@ def load_model():
32
  def chat_glm(input, history=None):
33
  if history is None:
34
  history = []
35
- if not LOAD_MODEL:
36
- LOAD_MODEL=True
37
- tokenizer,model = load_model()
38
  response, history = model.chat(tokenizer, input, history)
39
  logger.info("chatglm:", input,response)
40
  return history, history
 
19
  from langchain.prompts.prompt import PromptTemplate
20
  from langchain.chat_models import ChatOpenAI
21
 
 
 
 
22
  def load_model():
23
  tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
24
  model = AutoModel.from_pretrained("THUDM/chatglm-6b-int4", trust_remote_code=True).quantize(bits=4, compile_parallel_kernel=True, parallel_num=2).float()
 
29
def chat_glm(input, history=None):
    """Run one turn of chat against the ChatGLM model.

    Parameters:
        input: the user's message for this turn.  (NOTE(review): the name
            shadows the `input` builtin, but it is part of the public
            signature and is kept for caller compatibility.)
        history: prior conversation turns as expected by `model.chat`;
            defaults to a fresh empty list when None.

    Returns:
        A pair `(history, history)` — the updated conversation history,
        duplicated (presumably to feed two Gradio outputs; confirm against
        the caller).
    """
    if history is None:
        history = []
    # Load the tokenizer/model pair once and cache it on the function
    # object.  Calling load_model() unconditionally here would re-download
    # and re-quantize the ~6B-parameter model on every single chat turn.
    if not hasattr(chat_glm, "_cached"):
        chat_glm._cached = load_model()
    tokenizer, model = chat_glm._cached
    response, history = model.chat(tokenizer, input, history)
    # Use lazy %-style formatting: the original call
    # logger.info("chatglm:", input, response) supplied format args to a
    # string with no placeholders, which makes the logging module emit a
    # "not all arguments converted" error and drop the arguments entirely.
    logger.info("chatglm: %s %s", input, response)
    return history, history