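"""A minimal Chainlit chat app backed by a LangChain ChatOpenAI model.

Keeps a per-session message history, accepts file uploads as extra context,
and runs each turn through a prompt | model | output-parser chain.
"""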
import os
import chainlit as cl

from langchain_openai.chat_models import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.messages import (
    AIMessage,
    HumanMessage,
    SystemMessage
)

MESSAGE_HISTORY = "message_history"
GPT3_MODEL_NAME = "gpt-3.5-turbo-0125"
GPT4_MODEL_NAME = "gpt-4-turbo-preview"
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")

# Avoid logging the raw API key; only report whether it is present.
print(f"==> OPENAI_API_KEY set: {OPENAI_API_KEY is not None}")
_system_message = SystemMessage(content=(
    "You are a helpful and creative assistant who tries your best to answer questions "
    "in the most witty and funny way. If you feel that a poetic touch is needed to "
    "uplift the mood for the user, go ahead and write a sonnet. Always be positive, "
    "encouraging, and inspirational if possible."
))
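# Shared chat model handle; assigned when a chat session starts.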
_chat_model: ChatOpenAI

@cl.on_chat_start
async def start():
    # Runs once per session: build the chat model, seed the message history
    # with the system prompt, and greet the user.
    global _chat_model
    _chat_model = ChatOpenAI(
        model=GPT4_MODEL_NAME,
        temperature=0.5
    )
    cl.user_session.set(MESSAGE_HISTORY, [_system_message])
    await cl.Message(
        content="Hello there! How are you?"
    ).send()

@cl.on_message
async def main(message: cl.Message):
    # Handle one user turn: fold any uploaded files and the message text into
    # the running history, run it through the model, and reply.
    messages = cl.user_session.get(MESSAGE_HISTORY)
    if not messages:
        # Re-seed the history (and store it back) if the session has none yet.
        messages = [_system_message]
        cl.user_session.set(MESSAGE_HISTORY, messages)

    # Append the contents of any uploaded files to the history as user messages.
    if len(message.elements) > 0:
        for element in message.elements:
            with open(element.path, "r") as uploaded_file:
                content = uploaded_file.read()
            messages.append(HumanMessage(content=content))
            confirm_message = cl.Message(content=f"Uploaded file: {element.name}")
            await confirm_message.send()

    messages.append(HumanMessage(content=message.content))
    # The full history is embedded in the prompt as static messages, so no
    # template variables are needed at invocation time.
    prompt = ChatPromptTemplate.from_messages(messages=messages)
    print(vars(prompt))
    output_parser = StrOutputParser()

    try:
        chain = prompt | _chat_model | output_parser
        # Use the async API so the Chainlit event loop is not blocked.
        response = await chain.ainvoke({})
        # Keep the assistant's reply in the history so later turns have context.
        messages.append(AIMessage(content=response))
    except Exception as e:
        response = f"no response: {e}"

    await cl.Message(
        content=response
    ).send()