devve1 committed on
Commit
8b5cd21
1 Parent(s): 7eda63d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -23
app.py CHANGED
@@ -13,10 +13,10 @@ import validators
13
  import numpy as np
14
  import pandas as pd
15
  import streamlit as st
16
- from typing import List
17
  from pathlib import Path
18
  from numpy import ndarray
19
  from outlines import models
 
20
  from qdrant_client import QdrantClient
21
  from optimum_encoder import OptimumEncoder
22
  from huggingface_hub import snapshot_download
@@ -442,22 +442,29 @@ if __name__ == '__main__':
442
  try:
443
  with open(conversations_path, 'rb') as fp:
444
  packed_bytes = fp.read()
445
- conversations: dict = msgpack.unpackb(packed_bytes, raw=False)
446
  except:
447
  conversations = {}
448
 
449
- if st.session_state.menu_id == 'ChatBot':
450
  if 'chat_id' not in st.session_state:
451
- st.session_state.chat_id = 'New Chat'
452
-
 
 
 
 
 
 
 
453
  with st.sidebar:
454
- st.session_state.chat_id = st.selectbox(
455
  label='Choose a conversation',
456
- options=[st.session_state.chat_id] + list(conversations.keys()) if st.session_state.chat_id not in conversations.keys() else list(conversations.keys()),
457
  index=0,
458
  placeholder='_',
 
459
  )
460
- print(f'KEY SELECTBOX: {st.session_state.chat_id}')
461
 
462
  col1, col2 = st.columns([1,1])
463
 
@@ -504,11 +511,30 @@ if __name__ == '__main__':
504
  for message in st.session_state.messages:
505
  with st.chat_message(message["role"]):
506
  st.markdown(message["content"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
507
 
508
- if prompt := st.chat_input("Message Video Game Assistant"):
 
 
 
 
 
509
  st.chat_message("user").markdown(prompt)
510
  st.session_state.messages.append({"role": "user", "content": prompt})
511
- print(f'PROMPT: {prompt}')
512
  ai_response = main(prompt, client, collection_name, template, llm, dense_model, sparse_model)
513
  with st.chat_message("assistant"):
514
  message_placeholder = st.empty()
@@ -520,19 +546,6 @@ if __name__ == '__main__':
520
  message_placeholder.write(re.sub('▌', '', full_response))
521
 
522
  st.session_state.messages.append({"role": "assistant", "content": full_response})
523
-
524
- if st.session_state.chat_id == 'New Chat':
525
- outputs = llm.generate(
526
- prompts=template.format(system='You are a helpful assistant.', user=f"""Understand the question of the user.
527
- Resume in one single sentence what is the subject of the conversation and what is the user talking about.
528
-
529
- Question : {prompt}"""),
530
- sampling_params=vllm.SamplingParams(
531
- temperature=0.3,
532
- max_tokens=30
533
- )
534
- )
535
- st.session_state.chat_id = outputs[0].outputs[0].text
536
 
537
  conversations.update({st.session_state.chat_id: st.session_state.messages})
538
  with open(conversations_path, 'wb') as fp:
 
13
  import numpy as np
14
  import pandas as pd
15
  import streamlit as st
 
16
  from pathlib import Path
17
  from numpy import ndarray
18
  from outlines import models
19
+ from typing import List, Dict
20
  from qdrant_client import QdrantClient
21
  from optimum_encoder import OptimumEncoder
22
  from huggingface_hub import snapshot_download
 
442
  try:
443
  with open(conversations_path, 'rb') as fp:
444
  packed_bytes = fp.read()
445
+ conversations: Dict[str, list] = msgpack.unpackb(packed_bytes, raw=False)
446
  except:
447
  conversations = {}
448
 
449
+ def options_list(conversations: Dict[str, list]):
450
  if 'chat_id' not in st.session_state:
451
+ return 'New Chat'
452
+ elif st.session_state.chat_id is None:
453
+ return 'New Chat'
454
+ elif st.session_state.chat_id not in list(conversations.keys()):
455
+ return [st.session_state.chat_id] + list(conversations.keys())
456
+ else:
457
+ return list(conversations.keys())
458
+
459
+ if st.session_state.menu_id == 'ChatBot':
460
  with st.sidebar:
461
+ st.selectbox(
462
  label='Choose a conversation',
463
+ options=options_list(conversations),
464
  index=0,
465
  placeholder='_',
466
+ key='chat_id'
467
  )
 
468
 
469
  col1, col2 = st.columns([1,1])
470
 
 
511
  for message in st.session_state.messages:
512
  with st.chat_message(message["role"]):
513
  st.markdown(message["content"])
514
+
515
+ def generate_conv_title(llm: vllm.LLM, template):
516
+ if st.session_state.chat_id == 'New Chat':
517
+ outputs = llm.generate(
518
+ prompts=template.format(system='You are a helpful assistant.', user=f"""Understand the question of the user.
519
+ Resume in one single sentence what is the subject of the conversation and what is the user talking about.
520
+
521
+ Question : {st.session_state.user_input}"""),
522
+ sampling_params=vllm.SamplingParams(
523
+ temperature=0.3,
524
+ max_tokens=30
525
+ )
526
+ )
527
+ st.session_state.chat_id = outputs[0].outputs[0].text
528
 
529
+ if prompt := st.chat_input(
530
+ "Message Video Game Assistant",
531
+ on_submit=generate_conv_title,
532
+ key='user_input',
533
+ args=(llm, template)
534
+ ):
535
  st.chat_message("user").markdown(prompt)
536
  st.session_state.messages.append({"role": "user", "content": prompt})
537
+
538
  ai_response = main(prompt, client, collection_name, template, llm, dense_model, sparse_model)
539
  with st.chat_message("assistant"):
540
  message_placeholder = st.empty()
 
546
  message_placeholder.write(re.sub('▌', '', full_response))
547
 
548
  st.session_state.messages.append({"role": "assistant", "content": full_response})
 
 
 
 
 
 
 
 
 
 
 
 
 
549
 
550
  conversations.update({st.session_state.chat_id: st.session_state.messages})
551
  with open(conversations_path, 'wb') as fp: