naveenvenkatesh committed
Commit: 64a2916
Parent(s): 44b746f
Update app.py
app.py
CHANGED
@@ -2,6 +2,7 @@ from langchain.text_splitter import CharacterTextSplitter
 from langchain.embeddings import OpenAIEmbeddings
 from langchain.vectorstores import FAISS
 from langchain.chat_models import ChatOpenAI
+from langchain_openai import AzureChatOpenAI
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationChain
 from langchain.chains import ConversationalRetrievalChain
@@ -24,6 +25,7 @@ from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent
 from langchain.agents.agent_types import AgentType
 # from langchain.agents import create_csv_agent
 from langchain import OpenAI, LLMChain
+
 class ChatDocumentQA:
     def __init__(self) -> None:
         pass
@@ -112,7 +114,7 @@ class ChatDocumentQA:
         memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
 
         # llm = ChatOpenAI(temperature=0)
-        llm=
+        llm=AzureChatOpenAI(azure_deployment = "ChatGPT")
 
         return ConversationalRetrievalChain.from_llm(llm=llm, retriever=vectorstore.as_retriever(),
                                                      condense_question_prompt=CONDENSE_QUESTION_PROMPT,