talltree committed on
Commit
61c5e3c
β€’
1 Parent(s): c2b216b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +77 -9
app.py CHANGED
@@ -1,14 +1,19 @@
1
  import openai
2
  import streamlit as st
3
  from langchain_core.messages import AIMessage, ChatMessage, HumanMessage
 
 
 
4
 
5
  from rag_chain.chain import get_rag_chain
6
 
 
 
7
  # Streamlit page configuration
8
- st.set_page_config(page_title="Tall Tree Integrated Health",
9
  page_icon="πŸ’¬",
10
  layout="centered",
11
- initial_sidebar_state="collapsed")
12
 
13
  # Streamlit CSS configuration
14
 
@@ -37,7 +42,6 @@ def get_chain_and_memory():
37
  # gpt-4 points to gpt-4-0613
38
  # gpt-4-turbo-preview points to gpt-4-0125-preview
39
  # Fine-tuned: ft:gpt-3.5-turbo-1106:tall-tree::8mAkOSED
40
- # gpt-4-1106-preview
41
  return get_rag_chain(model_name="gpt-4-1106-preview", temperature=0.2)
42
 
43
  except Exception as e:
@@ -55,6 +59,27 @@ if "history" not in st.session_state:
55
  if "messages" not in st.session_state:
56
  st.session_state["messages"] = []
57
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  # Select locations element into a container
59
  with st.container(border=False):
60
  # Set the welcome message
@@ -69,9 +94,8 @@ with st.container(border=False):
69
  index=None, horizontal=False,
70
  )
71
 
72
- # Add some space between the container and the chat interface
73
- for _ in range(2):
74
- st.markdown("\n\n")
75
 
76
  # Get user input only if a location is selected
77
  prompt = ""
@@ -108,9 +132,11 @@ if prompt:
108
  try:
109
  partial_message = ""
110
  with st.spinner(" "):
111
- for chunk in chain.stream({"message": prompt}):
112
- partial_message += chunk
113
- message_placeholder.markdown(partial_message + "|")
 
 
114
  except openai.BadRequestError:
115
  st.warning(openai_api_error_message, icon="πŸ™")
116
  st.stop()
@@ -128,3 +154,45 @@ if prompt:
128
  # add the full response to the message history
129
  st.session_state["messages"].append(ChatMessage(
130
  role="assistant", content=partial_message))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import openai
2
  import streamlit as st
3
  from langchain_core.messages import AIMessage, ChatMessage, HumanMessage
4
+ from langchain_core.tracers.context import collect_runs
5
+ from langsmith import Client
6
+ from streamlit_feedback import streamlit_feedback
7
 
8
  from rag_chain.chain import get_rag_chain
9
 
10
+ client = Client()
11
+
12
  # Streamlit page configuration
13
+ st.set_page_config(page_title="Tall Tree Health",
14
  page_icon="πŸ’¬",
15
  layout="centered",
16
+ initial_sidebar_state="expanded")
17
 
18
  # Streamlit CSS configuration
19
 
 
42
  # gpt-4 points to gpt-4-0613
43
  # gpt-4-turbo-preview points to gpt-4-0125-preview
44
  # Fine-tuned: ft:gpt-3.5-turbo-1106:tall-tree::8mAkOSED
 
45
  return get_rag_chain(model_name="gpt-4-1106-preview", temperature=0.2)
46
 
47
  except Exception as e:
 
59
  if "messages" not in st.session_state:
60
  st.session_state["messages"] = []
61
 
62
+ # Add a sidebar
63
+
64
+ st.sidebar.markdown(
65
+ """
66
+ ### Your Feedback Matters!
67
+ Help us enhance our AI-powered assistant by sharing your feedback.\n\n
68
+ **Rate the Response Quality:**
69
+
70
+ - πŸ˜€ **Excellent (Score: 1):** Complete and clear answer.
71
+ - πŸ™‚ **Good (Score: 0.75):** Helpful, but could be clearer or more detailed.
72
+ - 😐 **Neutral (Score: 0.5):** Somewhat helpful, partially correct.
73
+ - πŸ™ **Poor (Score: 0.25):** Mostly incorrect or unhelpful.
74
+ - 😞 **Very Poor (Score: 0):** Completely incorrect or not helpful.
75
+
76
+ Thank you! Let's get started. πŸš€
77
+ """
78
+ )
79
+
80
+ # Add delimiter between sidebar expander and the welcome message
81
+ st.markdown("###")
82
+
83
  # Select locations element into a container
84
  with st.container(border=False):
85
  # Set the welcome message
 
94
  index=None, horizontal=False,
95
  )
96
 
97
+ # Add delimiter between the container and the chat interface
98
+ st.markdown("###")
 
99
 
100
  # Get user input only if a location is selected
101
  prompt = ""
 
132
  try:
133
  partial_message = ""
134
  with st.spinner(" "):
135
+ with collect_runs() as cb: # Collect runs for feedback in langsmith
136
+ for chunk in chain.stream({"message": prompt}):
137
+ partial_message += chunk
138
+ message_placeholder.markdown(partial_message + "|")
139
+ st.session_state.run_id = cb.traced_runs[0].id
140
  except openai.BadRequestError:
141
  st.warning(openai_api_error_message, icon="πŸ™")
142
  st.stop()
 
154
  # add the full response to the message history
155
  st.session_state["messages"].append(ChatMessage(
156
  role="assistant", content=partial_message))
157
+
158
+
159
+ # Feedback system using streamlit feedback and Langsmith
160
+ feedback_option = "faces"
161
+
162
+ if st.session_state.get("run_id"):
163
+ run_id = st.session_state.run_id
164
+ feedback = streamlit_feedback(
165
+ feedback_type=feedback_option,
166
+ optional_text_label="[Optional] Please provide an explanation",
167
+ key=f"feedback_{run_id}",
168
+ )
169
+ score_mappings = {
170
+ "thumbs": {"πŸ‘": 1, "πŸ‘Ž": 0},
171
+ "faces": {"πŸ˜€": 1, "πŸ™‚": 0.75, "😐": 0.5, "πŸ™": 0.25, "😞": 0},
172
+ }
173
+
174
+ # Get the score mapping based on the selected feedback option
175
+ scores = score_mappings[feedback_option]
176
+
177
+ if feedback:
178
+ # Get the score from the selected feedback option's score mapping
179
+ score = scores.get(feedback["score"])
180
+
181
+ if score is not None:
182
+ # Formulate feedback type string incorporating the feedback option
183
+ # and score value
184
+ feedback_type_str = f"{feedback_option} {feedback['score']}"
185
+
186
+ # Record the feedback with the formulated feedback type string
187
+ feedback_record = client.create_feedback(
188
+ run_id,
189
+ feedback_type_str,
190
+ score=score,
191
+ comment=feedback.get("text"),
192
+ )
193
+ st.session_state.feedback = {
194
+ "feedback_id": str(feedback_record.id),
195
+ "score": score,
196
+ }
197
+ else:
198
+ st.warning("Invalid feedback score.")