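"""Chainlit entry point: registers action callbacks, sets up per-session state on
chat start, and routes each user message by its '!' prefix or session stage."""
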
import chainlit as cl
import json
import os
import whisper
from datetime import datetime
from dotenv import load_dotenv
from openai import OpenAI

from classes import SessionState
from utils_callbacks import (
    callback_run_scenario,
    callback_start_scenario,
    callback_evaluate_performance,
    callback_display_queries_responses,
)
from utils_chain_parameters import prepare_chain_parameters
from utils_control_messages import handle_control_message
from utils_output import display_evaluation_results
from utils_prep import prep_research, prep_sparring, prep_start
from utils_sparring import do_sparring
from utils_voice import reply_with_voice

llm_model = "gpt-4o-mini"
# llm_model = "gpt-4o"
# llm_model = "gpt-3.5-turbo"
# llm_model = "gpt-4o-2024-08-06"

load_dotenv()
openai_api_key = os.getenv("OPENAI_API_KEY")

# Leave these as None when setup fails so on_chat_start can report the problem instead of crashing here.
client = OpenAI(api_key=openai_api_key) if openai_api_key else None
try:
    whisper_model = whisper.load_model("base")
except Exception:
    whisper_model = None

#############################################
# Action callbacks
#############################################

# @cl.action_callback("Select a Scenario")
# async def on_action_show_scenarios(action):
#     await callback_show_scenarios(cl)

@cl.action_callback("Scenario")
async def on_action_run_scenario(action):
    await callback_run_scenario(action)

@cl.action_callback("Start Scenario")
async def on_action_start_scenario(action):
    print("on_action_start_scenario()")
    await callback_start_scenario()

@cl.action_callback("Evaluate Performance")
async def on_action_evaluate_performance(action):
    await callback_evaluate_performance()

@cl.action_callback("Display Queries and Responses")
async def on_action_display_queries_responses(action):
    await callback_display_queries_responses()

#############################################
### On Chat Start (Session Start) Section ###
#############################################
@cl.on_chat_start
async def on_chat_start():
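    """Initialize session state, check the OpenAI and Whisper setup, and run the prep steps."""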
    session_state = SessionState()
    cl.user_session.set("session_state", session_state)
    session_state.llm_model = llm_model
    print(session_state)
    cl.user_session.set("messages", [])
    if client is None:
        await cl.Message(content="Error: OpenAI client not initialized. Please check your API key.").send()
        return
    if whisper_model is None:
        await cl.Message(content="Error: Whisper model not loaded. Please check your installation.").send()
        return

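    # Run the prep pipeline up front; the session then starts directly in the sparring stage.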
    await prep_start(session_state)
    await prep_research(session_state)
    session_state.session_stage = "sparring"
    await prep_sparring(session_state)



#########################################################
### On Message Section - called for each User Message ###
#########################################################
@cl.on_message
async def main(message: cl.Message):
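    """Handle a user message: '!'-prefixed control messages first, otherwise branch on session_stage."""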
    content = message.content.strip()
    session_state = cl.user_session.get("session_state", None)
    if content.startswith('!'):
        # This is a control message
        print("Received control message:", content[1:])
        await handle_control_message(content[1:])

    elif session_state.session_stage == "research":
        await cl.Message(content="Doing research...").send()
        print("session_stage == research")  
        session_state.session_stage = "question_prep"

    elif session_state.session_stage == "question_prep":
        print("session_stage == question_prep")
        await cl.Message(content="Preparing questions...").send()
        session_state.session_stage = "sparring"

    elif session_state.session_stage == "sparring":
        # await prep_sparring(session_state)
        await do_sparring(client, session_state, message)


#############################################
### Support Functions
#############################################