Asankhaya Sharma committed
Commit f121b56 · 1 Parent(s): 093e739
Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -5,7 +5,7 @@ from streamlit_chat import message
 
 checkpoint = "."
 
-@st.cache
+@st.cache_resource
 def get_model():
     model = AutoModelForCausalLM.from_pretrained(checkpoint)
     tokenizer = AutoTokenizer.from_pretrained(checkpoint)
@@ -71,7 +71,7 @@ if st.session_state.input:
     st.session_state.chat_history_ids = model.generate(bot_input_ids, generation_config)
     response = tokenizer.decode(st.session_state.chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
 
-    #st.write(f"meraGPT: {response}")
+    # st.write(f"meraGPT: {response}")
     st.session_state.old_response = response
     st.session_state.response_history.append(response)
 
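
The change swaps the deprecated st.cache decorator for st.cache_resource, which is the Streamlit caching primitive intended for unserializable objects such as models and tokenizers. A minimal sketch of the resulting loader pattern, assuming get_model() returns the model/tokenizer pair and is called once at startup (the return statement and call site are not shown in the diff):

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

checkpoint = "."  # local model directory, as in app.py

# st.cache_resource keeps the loaded model and tokenizer alive across
# Streamlit reruns instead of reloading them on every user interaction.
@st.cache_resource
def get_model():
    model = AutoModelForCausalLM.from_pretrained(checkpoint)
    tokenizer = AutoTokenizer.from_pretrained(checkpoint)
    return model, tokenizer  # assumed return shape; the diff truncates here

model, tokenizer = get_model()  # hypothetical call site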