|
|
|
import os

import streamlit as st
from openai import OpenAI
|
|
|
st.title("Mini Project 2: Streamlit Chatbot") |
|
|
|
|
|
# The API key is read from the environment rather than hardcoded in the source.
openai_key = os.environ.get("OPENAI_API_KEY")

client = OpenAI(api_key=openai_key)
|
|
|
|
|
def get_conversation() -> str:
    """Return the chat history as a single plain-text transcript (not called by the UI; useful for debugging)."""
    conversation = ""
    for message in st.session_state.messages:
        role = message["role"]
        content = message["content"]
        conversation += f"{role}: {content}\n"
    return conversation
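# Example (hypothetical) transcript produced by get_conversation():
#   user: Hello!
#   assistant: Hi! How can I help you today?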
|
|
|
|
|
# Initialize session-state defaults on the first run.
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-3.5-turbo"

if "messages" not in st.session_state:
    st.session_state.messages = []
|
|
|
|
|
# Replay the stored conversation so the history persists across reruns.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
|
|
|
|
|
# Accept new user input, record it, and echo it in the chat.
if prompt := st.chat_input("What would you like to chat about?"):
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("user"):
        st.markdown(prompt)

    # Send the full message history to the model and display the reply.
    with st.chat_message("assistant"):
        response = client.chat.completions.create(
            model=st.session_state["openai_model"],
            messages=[
                {"role": m["role"], "content": m["content"]}
                for m in st.session_state.messages
            ],
        )
        ai_response = response.choices[0].message.content
        st.markdown(ai_response)

    # Store the reply so it is rendered on subsequent reruns.
    st.session_state.messages.append({"role": "assistant", "content": ai_response})
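
# To run the app locally (assuming this file is saved as chatbot.py):
#   export OPENAI_API_KEY="..."   # your OpenAI API key
#   streamlit run chatbot.py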