tedslin committed on
Commit 56d96ca · verified
Parent: 07b5226

Upload 2 files

Files changed (2)
  1. app.py +106 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,106 @@
+ import streamlit as st
+ from openai import OpenAI
+ from typing import Iterator
+ import os
+ from phoenix.otel import register
+
+ tracer_provider = register(
+     project_name=os.getenv('PHOENIX_PROJECT_NAME'),
+     endpoint=os.getenv('PHOENIX_COLLECTOR_ENDPOINT'),
+ )
+
+ from openinference.instrumentation.openai import OpenAIInstrumentor
+ OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)
+
+ # Configure page settings
+ st.set_page_config(
+     page_title="LLM Taiwan Chat",
+     page_icon="💬",
+     layout="centered"
+ )
+
+ # Initialize session state for chat history and system prompt
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+ if "system_prompt" not in st.session_state:
+     st.session_state.system_prompt = ""
+
+ def stream_chat(prompt: str) -> Iterator[str]:
+     """Stream chat responses from the LLM API"""
+     client = OpenAI(
+         api_key=os.getenv('API_KEY'),
+         base_url=os.getenv('API_BASE_URL')
+     )
+
+     messages = []
+     if st.session_state.system_prompt:
+         messages.append({"role": "system", "content": st.session_state.system_prompt})
+     messages.extend(st.session_state.messages)
+     # The user prompt is already appended to st.session_state.messages in main(), so it is not added again here
+
+     stream = client.chat.completions.create(
+         messages=messages,
+         model=os.getenv('LLM_MODEL_NAME'),
+         stream=True
+     )
+
+     for chunk in stream:
+         if chunk.choices[0].delta.content is not None:
+             yield chunk.choices[0].delta.content
+
+ def clear_chat_history():
+     """Clear all chat messages and reset system prompt"""
+     st.session_state.messages = []
+     st.session_state.system_prompt = ""
+
+ def main():
+     st.title("💬 LLM Taiwan Chat")
+
+     # Clear-chat button in a narrow right-hand column
+     col1, col2 = st.columns([6, 1])
+     with col2:
+         if st.button("🗑️", type="secondary", use_container_width=True):
+             clear_chat_history()
+             st.rerun()
+
+     # System prompt input
+     system_prompt = st.text_area(
+         "System prompt setting:",
+         value=st.session_state.system_prompt,
+         help="Set a system prompt to define the AI assistant's behavior and role. It cannot be changed once the conversation has started.",
+         height=100,
+         disabled=len(st.session_state.messages) > 0  # read-only once a conversation exists
+     )
+     if not st.session_state.messages and system_prompt != st.session_state.system_prompt:
+         st.session_state.system_prompt = system_prompt
+
+     # Display chat messages
+     for message in st.session_state.messages:
+         with st.chat_message(message["role"]):
+             st.write(message["content"])
+
+     # Chat input
+     if prompt := st.chat_input("Enter your message..."):
+         # Add user message to chat history
+         st.session_state.messages.append({"role": "user", "content": prompt})
+
+         # Display user message
+         with st.chat_message("user"):
+             st.write(prompt)
+
+         # Display assistant response with streaming
+         with st.chat_message("assistant"):
+             response_placeholder = st.empty()
+             full_response = ""
+
+             # Stream the response, showing a cursor while tokens arrive
+             for response_chunk in stream_chat(prompt):
+                 full_response += response_chunk
+                 response_placeholder.markdown(full_response + "▌")
+             response_placeholder.markdown(full_response)
+
+             # Add assistant response to chat history
+             st.session_state.messages.append({"role": "assistant", "content": full_response})
+
+ if __name__ == "__main__":
+     main()
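
For reference, here is a minimal standalone sketch of the streaming pattern stream_chat() uses, outside Streamlit. It assumes API_KEY, API_BASE_URL, and LLM_MODEL_NAME are exported in the environment, exactly as app.py expects; the greeting prompt is illustrative only.

import os
from openai import OpenAI

# Same client construction as app.py: key and endpoint come from env vars.
client = OpenAI(api_key=os.getenv("API_KEY"), base_url=os.getenv("API_BASE_URL"))

# stream=True yields incremental deltas instead of one full completion.
stream = client.chat.completions.create(
    model=os.getenv("LLM_MODEL_NAME"),
    messages=[{"role": "user", "content": "Hello!"}],  # illustrative prompt
    stream=True,
)
for chunk in stream:
    if chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="", flush=True)
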
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ streamlit==1.40.2
+ openai==1.55.3
+ arize-phoenix==5.12.0
+ openinference-instrumentation==0.1.18
+ openinference-instrumentation-openai==0.1.18
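
As a quick sanity check (an assumed verification step, not part of this commit), the pinned packages should resolve to the module paths app.py imports:

import streamlit
import openai
from phoenix.otel import register  # provided by arize-phoenix
from openinference.instrumentation.openai import OpenAIInstrumentor  # openinference-instrumentation-openai

print(streamlit.__version__, openai.__version__)  # expect 1.40.2 and 1.55.3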