Spaces: AI-RESEARCHER-2024 (Runtime error)

AI-RESEARCHER-2024 committed
Commit d8472fa
1 Parent(s): 84f58ef
Update app.py
app.py CHANGED
@@ -1,44 +1,88 @@
(Old version, removed. The page only preserves fragments of these lines; … marks text lost in rendering.)

 import chainlit as cl
-from …
-from langchain.…
-from …
 from langchain_community.embeddings import HuggingFaceEmbeddings
-import os
-import sys
-# …
-os.system(f'ollama pull {model}')
-    api_key="ollama",
-    model='llama3.2',
-    base_url="http://localhost:11434/v1",
-    temperature=0
-)
-persist_directory = 'mydb'
-vectordb = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
-# Define the Chainlit app
 @cl.on_message
-def main(message):
-    cl.Message(content=…
+import os
 import chainlit as cl
+from langchain_community.llms import Ollama
+from langchain.prompts import ChatPromptTemplate
+from langchain_core.output_parsers import StrOutputParser
+from langchain_core.runnables import RunnablePassthrough
+from langchain_community.vectorstores import Chroma
 from langchain_community.embeddings import HuggingFaceEmbeddings
+embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
 
+# Load the existing Chroma vector store
+persist_directory = 'mydb'
+vectorstore = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
 
+# Initialize the Ollama LLM
+llm = Ollama(
+    model="llama3.2",  # You can change this to any model you have pulled in Ollama
+    temperature=0
+)
 
+# Create the RAG prompt template
+template = """Answer the question based only on the following context:
 
+{context}
 
+Question: {question}
 
+Answer the question in a clear and concise way. If you cannot find the answer in the context, just say "I don't have enough information to answer this question."
 
+Make sure to:
+1. Only use information from the provided context
+2. Be concise and direct
+3. If you're unsure, acknowledge it
+"""
 
+prompt = ChatPromptTemplate.from_template(template)
+
+@cl.on_chat_start
+async def start():
+    # Send an initial greeting when a chat session opens
+    await cl.Message(
+        content="Hi! I'm ready to answer your questions based on the stored documents. What would you like to know?"
+    ).send()
 
 @cl.on_message
+async def main(message: cl.Message):
+    # Send an empty placeholder message, then fill it in once the answer is ready
+    msg = cl.Message(content="")
+    await msg.send()
+
+    # Wrap the retrieval and generation work in a step shown in the UI
+    async with cl.Step(name="Searching documents..."):
+        try:
+            # Retrieve the 3 most similar chunks from the vector store
+            retriever = vectorstore.as_retriever(search_kwargs={"k": 3})
+
+            # Compose the RAG chain: question -> context + question -> prompt -> LLM -> string
+            rag_chain = (
+                {"context": retriever, "question": RunnablePassthrough()}
+                | prompt
+                | llm
+                | StrOutputParser()
+            )
+
+            # Run the synchronous chain without blocking the event loop
+            response = await cl.make_async(rag_chain.invoke)(message.content)
+
+            # Update the placeholder message with the answer
+            msg.content = response
+            await msg.update()
+
+            # Attach the retrieved chunks as inline source elements
+            docs = retriever.get_relevant_documents(message.content)
+            elements = []
+            for i, doc in enumerate(docs):
+                source_name = f"Source {i+1}"
+                elements.append(
+                    cl.Text(name=source_name, content=doc.page_content, display="inline")
+                )
+
+            if elements:
+                msg.elements = elements
+                await msg.update()
+
+        except Exception as e:
+            msg.content = f"An error occurred: {str(e)}"
+            await msg.update()
+
+# Start the app from the CLI: chainlit run app.py
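
A note on the chain construction above: the dict at the head of rag_chain fans the incoming question out to two branches. The retriever branch turns the question into documents that fill {context}, while RunnablePassthrough() forwards the question unchanged into {question}; the prompt then formats both. A minimal sketch of the same pattern with a stub in place of the Chroma retriever (fake_retriever and the text it returns are illustrative, not from the Space):

# lcel_demo.py: how the {"context": ..., "question": RunnablePassthrough()} mapping works.
from langchain.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableLambda, RunnablePassthrough

def fake_retriever(question: str) -> str:
    # A real retriever returns Documents; a plain string keeps the sketch self-contained
    return "Chainlit is a framework for building chat UIs around LLM apps."

prompt = ChatPromptTemplate.from_template("Context: {context}\n\nQuestion: {question}")

# Piping a dict into a Runnable coerces it to a RunnableParallel:
# both branches receive the same input value
chain = {"context": RunnableLambda(fake_retriever), "question": RunnablePassthrough()} | prompt

# Prints the fully formatted prompt that would be sent to the LLM
print(chain.invoke("What is Chainlit?"))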
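The app loads a Chroma store already persisted under mydb, but this commit does not show how that store was built. A minimal ingestion sketch under that assumption; the docs/ folder, glob pattern, and chunk sizes are placeholders, and the embedding model must match the one in app.py:

# build_db.py: one way to create the 'mydb' store this app loads (not shown in the commit).
# Assumptions: plain-text files under ./docs; chunk sizes are illustrative.
from langchain_community.document_loaders import DirectoryLoader, TextLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

loader = DirectoryLoader("docs", glob="**/*.txt", loader_cls=TextLoader)
documents = loader.load()

# Overlapping chunks so retrieval returns focused passages with some surrounding context
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunks = splitter.split_documents(documents)

# Must match the embedding model in app.py, otherwise queries and stored vectors
# live in different spaces and similarity search is meaningless
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
Chroma.from_documents(chunks, embeddings, persist_directory="mydb")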
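Two prerequisites are easy to miss when running this app: an Ollama server must be reachable on its default port with llama3.2 already pulled (ollama pull llama3.2), and the persisted mydb directory must exist next to app.py. The retrieval half can be sanity-checked without the LLM; the query string below is a placeholder:

# check_db.py: verify the persisted store loads and returns neighbors (no LLM needed).
# Assumes 'mydb' exists and was built with the same embedding model.
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vectorstore = Chroma(persist_directory="mydb", embedding_function=embeddings)

# Mirror the app's retriever settings (top 3 chunks)
retriever = vectorstore.as_retriever(search_kwargs={"k": 3})
for doc in retriever.invoke("test query"):
    print(doc.page_content[:120])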