devve1 committed on
Commit
89a8810
1 Parent(s): 9ee751c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -442,6 +442,7 @@ if __name__ == '__main__':
442
  )
443
  print('D')
444
  else:
 
445
  st.session_state.chat_id = st.selectbox(
446
  label='Choose a conversation',
447
  options=[st.session_state.chat_id] + list(conversations.keys()),
@@ -471,7 +472,7 @@ if __name__ == '__main__':
471
  prompt_conversation = [{"role": "user", "content": f"{prompt}\nResume the above in one sentence:"}]
472
  inputs = tokenizer.apply_chat_template(prompt_conversation, tokenize=False, add_generation_prompt=True)
473
  outputs = llm.generate(prompts=inputs, sampling_params=sampling_params)
474
-
475
  st.session_state.chat_id = outputs[0].outputs[0].text
476
  st.session_state.chat_title = st.session_state.chat_id
477
  print(f'Chat Title : {st.session_state.chat_title}')
 
442
  )
443
  print('D')
444
  else:
445
+ print('Pass before')
446
  st.session_state.chat_id = st.selectbox(
447
  label='Choose a conversation',
448
  options=[st.session_state.chat_id] + list(conversations.keys()),
 
472
  prompt_conversation = [{"role": "user", "content": f"{prompt}\nResume the above in one sentence:"}]
473
  inputs = tokenizer.apply_chat_template(prompt_conversation, tokenize=False, add_generation_prompt=True)
474
  outputs = llm.generate(prompts=inputs, sampling_params=sampling_params)
475
+ print('Pass inside')
476
  st.session_state.chat_id = outputs[0].outputs[0].text
477
  st.session_state.chat_title = st.session_state.chat_id
478
  print(f'Chat Title : {st.session_state.chat_title}')