import os
import json
import time

from langchain_openai import ChatOpenAI
from langchain.chains.conversational_retrieval.base import ConversationalRetrievalChain
from langchain.memory.buffer_window import ConversationBufferWindowMemory
from langchain_core.prompts import PromptTemplate
from upstash_vector import Index
from langchain_community.vectorstores.upstash import UpstashVectorStore

# Load settings (model name, temperature, retriever k, memory key) from system.json
settings = json.load(open("system.json", "r"))[0]

# Vector store and retriever setup
index = Index(os.environ["UPSTASH_VECTOR_REST_URL"], os.environ["UPSTASH_VECTOR_REST_TOKEN"])
vectorStore = UpstashVectorStore(
    embedding=True,  # use Upstash's built-in embedding model
    index=index,
)
retriever = vectorStore.as_retriever(search_kwargs={"k": settings["k"]})

# LLM setup
LLM = ChatOpenAI(model=settings["model"], temperature=settings["temp"])

# Prompt template setup
prompt_temp = """
You are an AI chatbot from Precious Plastic. Your job is to answer questions about recycling plastic.
You may include links as well as images in your answer.
Use the following context to help answer the question.
------
{context}
------
Question: {question}
Do not:
・ Do not make things up. If you do not know the answer, say that you do not know.
"""

QUESTION_PROMPT = PromptTemplate(
    template=prompt_temp,                    # the prompt template defined above
    input_variables=["context", "question"]  # variables substituted into the prompt
)

# Conversation memory
memory = ConversationBufferWindowMemory(
    memory_key=settings["MEMORY_KEY"],  # key under which the chat history is stored
    output_key="answer",                # key of the chain output to save to memory
    k=8,                                # number of conversation turns to keep
    return_messages=True,               # return the chat history as a list of messages
)
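
# ConversationalRetrievalChain is imported above but never constructed in this
# section. The sketch below is one plausible way to wire it up from the
# retriever, LLM, prompt, and memory defined here; the variable name `qa_chain`,
# the `return_source_documents` choice, and the example question are assumptions
# for illustration, not part of the original code. It also assumes
# settings["MEMORY_KEY"] matches the chain's expected "chat_history" input key.
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=LLM,
    retriever=retriever,
    memory=memory,
    return_source_documents=True,                           # memory's output_key="answer" suggests multiple outputs
    combine_docs_chain_kwargs={"prompt": QUESTION_PROMPT},  # use the custom prompt defined above
)

# Example call (hypothetical question):
# result = qa_chain.invoke({"question": "How do I recycle HDPE at home?"})
# print(result["answer"])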