wholewhale
committed on
Commit
•
d8804c0
1
Parent(s):
e8c47c5
anthropic
Browse files
app.py
CHANGED
@@ -9,28 +9,53 @@ from langchain.embeddings import OpenAIEmbeddings
|
|
9 |
from langchain.vectorstores import Chroma
|
10 |
from langchain.chains import ConversationalRetrievalChain
|
11 |
|
|
|
|
|
12 |
os.environ['OPENAI_API_KEY'] = os.getenv("Your_API_Key")
|
13 |
|
14 |
# Global variable for tracking last interaction time
|
15 |
last_interaction_time = 0
|
16 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
17 |
def loading_pdf():
|
18 |
return "Working on the upload. Also, pondering the usefulness of sporks..."
|
19 |
|
20 |
def pdf_changes(pdf_doc):
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
34 |
|
35 |
def clear_data():
|
36 |
global qa
|
@@ -44,21 +69,17 @@ def add_text(history, text):
|
|
44 |
return history, ""
|
45 |
|
46 |
def bot(history):
|
47 |
-
response =
|
48 |
-
|
|
|
49 |
history[-1][1] = formatted_response
|
50 |
return history
|
51 |
|
52 |
-
def
|
53 |
-
|
54 |
-
for human, ai in history[:-1]:
|
55 |
-
pair = (human, ai)
|
56 |
-
res.append(pair)
|
57 |
-
|
58 |
-
chat_history = res
|
59 |
query = question
|
60 |
-
result =
|
61 |
-
return result[
|
62 |
|
63 |
def auto_clear_data():
|
64 |
global qa, last_interaction_time
|
|
|
from langchain.vectorstores import Chroma
from langchain.chains import ConversationalRetrievalChain

# API keys are read from the (oddly named) environment variables used by the
# original deployment.  os.environ values must be strings, so fail with a
# clear message instead of the opaque TypeError that `os.environ[k] = None`
# raises when a variable is unset.
_anthropic_key = os.getenv("Your_Anthropic_API_Key")
if _anthropic_key is None:
    raise RuntimeError("Environment variable 'Your_Anthropic_API_Key' is not set")
os.environ['ANTHROPIC_API_KEY'] = _anthropic_key

_openai_key = os.getenv("Your_API_Key")
if _openai_key is None:
    raise RuntimeError("Environment variable 'Your_API_Key' is not set")
os.environ['OPENAI_API_KEY'] = _openai_key

# Global variable for tracking last interaction time
last_interaction_time = 0

# Initialize the Anthropic model instead of OpenAI.
# NOTE(review): the published `anthropic` Python SDK exposes `Anthropic` /
# `Client`, not `LanguageModel`, and "some_model" looks like a placeholder --
# confirm this import and the model name against the installed SDK version.
from anthropic import LanguageModel

anthropic_model = LanguageModel(api_key=os.environ['ANTHROPIC_API_KEY'], model="some_model")
26 |
def loading_pdf():
    """Return the status message shown in the UI while a PDF upload is processed."""
    return (
        "Working on the upload. "
        "Also, pondering the usefulness of sporks..."
    )
28 |
|
29 |
def pdf_changes(pdf_doc):
    """Load an uploaded PDF, index it, and build the retrieval QA chain.

    Side effects: rebinds the module-level globals ``db`` (Chroma vector
    store) and ``qa`` (ConversationalRetrievalChain).  Returns a
    human-readable status string for the UI instead of raising.
    """
    # A missing upload is an expected condition, not an error (`is None`
    # cannot raise, so checking it outside the try is behaviorally identical).
    if pdf_doc is None:
        return "No PDF uploaded."

    try:
        # Fetch the document and split it into ~1000-char overlapping chunks.
        pdf_documents = OnlinePDFLoader(pdf_doc.name).load()
        splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
        chunks = splitter.split_documents(pdf_documents)

        # Replace this with your appropriate embeddings class
        embedder = OpenAIEmbeddings()

        global db
        db = Chroma.from_documents(chunks, embedder)

        global qa
        qa = ConversationalRetrievalChain.from_llm(
            llm=OpenAI(temperature=0.2, model_name="gpt-3.5-turbo"),
            retriever=db.as_retriever(),
            return_source_documents=False,
        )
        return "Ready"
    except Exception as e:
        # Surface the failure to the UI as text rather than crashing the app.
        return f"Error processing PDF: {e}"
|
58 |
+
|
59 |
|
60 |
def clear_data():
|
61 |
global qa
|
|
|
69 |
return history, ""
|
70 |
|
71 |
def bot(history):
    """Answer the newest user message and write the reply into *history*.

    *history* holds [user, bot] pairs; the final entry's bot slot is filled
    in place with a markdown-formatted answer, and *history* is returned.
    """
    latest_question = history[-1][0]
    answer = infer_anthropic(latest_question, history)  # Call the new infer function
    # Put each sentence on its own line for readability in the chat widget.
    wrapped = ' \n'.join(answer.split('. '))
    history[-1][1] = f"**Bot:**\n\n{wrapped}"
    return history
|
77 |
|
78 |
+
def infer_anthropic(question, history):
    """Query the module-level Anthropic model with the conversation so far.

    Every history entry except the final (still-unanswered) one is passed
    along as chat context; returns the model's answer text.
    """
    prior_turns = []
    for human, ai in history[:-1]:
        prior_turns.append((human, ai))
    result = anthropic_model.query(question, context=prior_turns)
    return result['answer']
|
83 |
|
84 |
def auto_clear_data():
|
85 |
global qa, last_interaction_time
|