# You can find the code for Chainlit Python streaming here: https://docs.chainlit.io/concepts/streaming/python

# OpenAI Chat completion
import openai  # importing openai for API usage
import chainlit as cl  # importing chainlit for our app
from chainlit.input_widget import Select, Switch, Slider  # importing chainlit settings selection tools
from chainlit.prompt import Prompt, PromptMessage  # importing prompt tools
from chainlit.playground.providers import ChatOpenAI as ChatOpenAIProvider  # Chainlit playground provider, aliased so it doesn't shadow langchain's ChatOpenAI below
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chains import RetrievalQA
from langchain.vectorstores import FAISS
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate

# You only need the API key inserted here if it isn't in your .env file
# openai.api_key = "YOUR_API_KEY"

# ChatOpenAI templates (kept from the starter template; not used by the RetrievalQA chain below)
system_template = """You are the greatest wizard the world has ever known.
You live at the top of a tall tower. The user is at the bottom of the tower, shouting their prompts up to you.
Your responses will be structured using a wizardlike tone and phrasing, and you will speak in riddles.
In your responses, you will refer to the Wisdom of the Great Beyond, your immortality, and your great magic.
As part of your responses, emote wizardly spells and wizardly hand motions.
Refer to the user as "mortal".
If the user prompt contains a foreign language, respond only in that foreign language.
"""

user_template = """{input}
"""


@cl.author_rename
def rename(orig_author: str):
    # Rename message authors shown in the Chainlit UI
    rename_dict = {
        "RetrievalQA": "The Study Guide",
        "Chatbot": "Certified Clinical Engineer Study Guide Assistant",
    }
    return rename_dict.get(orig_author, orig_author)


@cl.on_chat_start  # marks a function that will be executed at the start of a user session
async def start_chat():
    msg = cl.Message(content="Starting up.")
    await msg.send()

    # Load the pre-built FAISS index of the study guide and wrap it in a RetrievalQA chain
    embeddings_model = OpenAIEmbeddings()
    study_guide_index = FAISS.load_local("CCE_local_studyguide_index", embeddings_model)

    chain = RetrievalQA.from_chain_type(
        ChatOpenAI(model="gpt-3.5-turbo", temperature=0, streaming=True),
        chain_type="stuff",
        return_source_documents=True,
        retriever=study_guide_index.as_retriever(),
    )

    msg = cl.Message(content="Finished.")
    await msg.send()

    # Store the chain in the user session so every message handler call can reuse it
    cl.user_session.set("chain", chain)


@cl.on_message  # marks a function that will be executed on every user message
async def main(message: cl.Message):
    chain = cl.user_session.get("chain")

    # Stream the final answer back to the UI as it is generated
    cb = cl.AsyncLangchainCallbackHandler(
        stream_final_answer=True, answer_prefix_tokens=["FINAL", "ANSWER"]
    )
    cb.answer_reached = True

    res = await chain.acall(message.content, callbacks=[cb])
    answer = res["result"]

    if cb.has_streamed_final_answer:
        await cb.final_stream.update()
    else:
        await cl.Message(content=answer).send()
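
# ---------------------------------------------------------------------------
# A minimal, commented-out sketch of how the "CCE_local_studyguide_index"
# FAISS index loaded above could be built. This is an assumption, not part of
# the original app: the source file name ("CCE_study_guide.pdf") and the
# chunking parameters are hypothetical placeholders. Run it once as a separate
# script before starting the Chainlit app.
#
# from langchain.document_loaders import PyPDFLoader
# from langchain.text_splitter import RecursiveCharacterTextSplitter
# from langchain.embeddings.openai import OpenAIEmbeddings
# from langchain.vectorstores import FAISS
#
# docs = PyPDFLoader("CCE_study_guide.pdf").load()  # hypothetical source document
# chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
# FAISS.from_documents(chunks, OpenAIEmbeddings()).save_local("CCE_local_studyguide_index")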