Commit 1ddf064 by devve1
Parent(s): 4075037

Update app.py

Files changed (1):
  app.py (+30 -51)
app.py CHANGED
@@ -790,6 +790,10 @@ if __name__ == '__main__':
          args=(conversations_path, conversations)
      )

+     for message in st.session_state.messages:
+         with st.chat_message(message["role"]):
+             st.markdown(message["content"])
+
      def generate_conv_title(llm):
          if st.session_state.chat_id == 'New Conversation':
              output = llm.chat(
@@ -807,19 +811,9 @@ if __name__ == '__main__':
              packed_bytes = msgpack.packb(conversations, use_bin_type=True)
              fp.write(packed_bytes)

-     chat_placeholder = st.empty()
-
-     with chat_placeholder.container():
-         for i, msg in enumerate(st.session_state.messages):
-             nkey = int(i/2)
-             if msg["role"] == "user":
-                 message(msg["content"], is_user=True, key='chat_messages_user_'+str(nkey))
-             else:
-                 message(msg["content"], is_user=False, key='chat_messages_assistant_'+str(nkey))
-
-     pills(" ", ["Option 1", "Option 2", "Option 3"], ["πŸ€", "🎈", "🌈"])
-
      with st.container():
+         pills(" ", ["Option 1", "Option 2", "Option 3"], ["πŸ€", "🎈", "🌈"])
+
          if prompt := st.text_input(
              ' ',
              on_change=generate_conv_title,
@@ -828,9 +822,7 @@ if __name__ == '__main__':
              label_visibility='collapsed',
              args=(st.session_state.llm, )
          ):
-             nkey = int(len(st.session_state.messages)/2)
-
-             message(prompt, is_user=True, key='chat_messages_user_'+str(nkey))
+             st.chat_message("user").markdown(prompt)
              st.session_state.messages.append({"role": "user", "content": prompt})

              ai_response = generate_answer(
@@ -844,41 +836,28 @@ if __name__ == '__main__':
                  st.session_state.documents_only
              )

-             message(ai_response, key='chat_messages_assistant_'+str(nkey))
-             st.session_state.messages.append({"role": "assistant", "content": ai_response})
-
-         if prompt := st.chat_input(
-             "Message Video Game Assistant",
-             on_submit=generate_conv_title,
-             key='user_input',
-             args=(st.session_state.llm, )
-         ):
-             st.chat_message("user").markdown(prompt)
-             st.session_state.messages.append({"role": "user", "content": prompt})
-
-             ai_response = generate_answer(
-                 prompt,
-                 st.session_state.client,
-                 st.session_state.collection_name,
-                 st.session_state.model,
-                 st.session_state.dense_model,
-                 st.session_state.sparse_model,
-                 "\n".join([f'{msg["role"]}: {msg["content"]}' for msg in st.session_state.messages]),
-                 st.session_state.documents_only
-             )
-
-             with st.chat_message("assistant"):
-                 message_placeholder = st.empty()
-                 full_response = ""
-                 for chunk in re.split(r'(\s+)', ai_response):
-                     full_response += chunk + " "
-                     time.sleep(0.05)
-                     message_placeholder.write(full_response + 'β–Œ')
-                 message_placeholder.write(re.sub('β–Œ', '', full_response))
+             with st.chat_message("assistant"):
+                 message_placeholder = st.empty()
+                 full_response = ""
+                 for chunk in re.split(r'(\s+)', ai_response):
+                     full_response += chunk + " "
+                     time.sleep(0.05)
+                     message_placeholder.write(full_response + 'β–Œ')
+                 message_placeholder.write(re.sub('β–Œ', '', full_response))

-             st.session_state.messages.append({"role": "assistant", "content": full_response})
+             st.session_state.messages.append({"role": "assistant", "content": full_response})

-             conversations.update({st.session_state.id_chat: st.session_state.messages})
-             with open(conversations_path, 'wb') as fp:
-                 packed_bytes = msgpack.packb(conversations, use_bin_type=True)
-                 fp.write(packed_bytes)
+             conversations.update({st.session_state.id_chat: st.session_state.messages})
+             with open(conversations_path, 'wb') as fp:
+                 packed_bytes = msgpack.packb(conversations, use_bin_type=True)
+                 fp.write(packed_bytes)
+
+             styl = f"""
+             <style>
+             .stTextInput {{
+                 position: fixed;
+                 bottom: 3rem;
+             }}
+             </style>
+             """
+             st.markdown(styl, unsafe_allow_html=True)
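
Net effect of the commit: the streamlit_chat message()/chat_placeholder rendering and the duplicate st.chat_input handler are removed, chat history is replayed with Streamlit's native st.chat_message, the pills row moves inside the input container, the assistant reply is "streamed" into an st.empty() placeholder, conversation saving moves into the text-input handler, and injected CSS pins the text input near the bottom of the page. Below is a minimal, self-contained sketch of that pattern only; answer_fn and HISTORY_PATH are stand-ins for the app's generate_answer() and conversations_path, and the real app keys conversations by st.session_state.id_chat.

import re
import time

import msgpack
import streamlit as st

HISTORY_PATH = "conversations.msgpack"  # hypothetical path, stands in for conversations_path

def answer_fn(prompt: str) -> str:
    # Stand-in for the app's generate_answer(); just echoes the prompt.
    return f"You said: {prompt}"

if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay the stored history with native chat bubbles (the loop added at the top of the diff).
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

if prompt := st.text_input(" ", key="user_input", label_visibility="collapsed"):
    st.chat_message("user").markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    ai_response = answer_fn(prompt)

    # Simulated streaming: rewrite a placeholder word by word, then drop the cursor.
    with st.chat_message("assistant"):
        placeholder = st.empty()
        full_response = ""
        for chunk in re.split(r"(\s+)", ai_response):
            full_response += chunk + " "
            time.sleep(0.05)
            placeholder.write(full_response + "β–Œ")
        placeholder.write(re.sub("β–Œ", "", full_response))

    st.session_state.messages.append({"role": "assistant", "content": full_response})

    # Persist the history the same way the commit does: msgpack bytes on disk.
    with open(HISTORY_PATH, "wb") as fp:
        fp.write(msgpack.packb({"chat": st.session_state.messages}, use_bin_type=True))

Run with streamlit run to see the replay/stream loop without the retrieval and LLM plumbing from app.py.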