initial commit
- Dockerfile +11 -0
- app.py +79 -0
- requirements.txt +2 -0
Dockerfile
ADDED
FROM python:3.11

# Run as a non-root user (UID 1000), as expected by Hugging Face Docker Spaces
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app

# Copy the app and install its dependencies
# (COPY does not expand "~", so the destination uses $HOME explicitly)
COPY --chown=user . $HOME/app
COPY ./requirements.txt $HOME/app/requirements.txt
RUN pip install -r requirements.txt
COPY . .

# Spaces route traffic to port 7860
CMD ["chainlit", "run", "app.py", "--port", "7860"]
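As a quick local check, an image built from this Dockerfile can be served with standard Docker commands; the tag name below is arbitrary, and the port mapping mirrors the CMD above:

docker build -t pdf-chat-space .
docker run -p 7860:7860 pdf-chat-space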
app.py
ADDED
import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext


def indexing(llm, path):
    # Load the uploaded PDF and build a vector index over it
    documents = SimpleDirectoryReader(input_files=[path]).load_data()
    print("loading done")
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
    )
    print("indexing")
    index = VectorStoreIndex.from_documents(
        documents, service_context=service_context, use_async=True
    )
    query_engine = index.as_query_engine()
    print("all done")
    print(query_engine)
    return query_engine


def qa(sp, engine, message):
    # Prepend the optional system prompt to the user's question
    ques = sp + " " + message.content
    response = engine.query(ques)
    return response


@cl.on_chat_start
async def factory():
    url = await cl.AskUserMessage(author="Beast", content="Enter url").send()
    print(url)
    # Strip a trailing slash from the deployment URL, if present
    if url["output"][-1] == "/":
        url["output"] = url["output"].rstrip("/")
    auth = await cl.AskUserMessage(author="Beast", content="Enter auth token").send()
    print(auth)
    model = "deploy-llm"
    llm = MonsterLLM(
        model=model,
        base_url=url["output"],
        monster_api_key=auth["output"],
        temperature=0.75,
        context_window=1024,
    )
    files = None
    while files is None:
        files = await cl.AskFileMessage(
            author="Beast",
            content="Please upload a PDF file to begin!",
            accept=["application/pdf"],
            max_size_mb=20,
            timeout=180,
        ).send()

    pdf = files[0]
    print(pdf)
    msg = cl.Message(author="Beast", content=f"Processing `{pdf.name}`...")
    await msg.send()
    # Indexing is blocking, so run it in a worker thread to keep the UI responsive
    query_engine = await cl.make_async(indexing)(llm, pdf.path)
    msg.content = f"`{pdf.name}` processed."
    await msg.update()
    res = await cl.AskActionMessage(
        author="Beast",
        content="Do you want to enter a system prompt?",
        actions=[
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()

    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast", content="Enter system prompt").send()
        await cl.Message(author="Beast", content="Noted. Go ahead and ask your questions!!").send()
        cl.user_session.set("sp", sp["output"])
    else:
        await cl.Message(author="Beast", content="Okay, then you can start asking your questions!!").send()
    cl.user_session.set("engine", query_engine)


@cl.on_message
async def main(message: cl.Message):
    msg = cl.Message(author="Beast", content="Processing...", disable_feedback=False)
    await msg.send()
    engine = cl.user_session.get("engine")
    sp = cl.user_session.get("sp")
    if sp is None:
        sp = ""
    # engine.query() is synchronous, so run it in a worker thread as well
    response = await cl.make_async(qa)(sp, engine, message)
    print(response)
    msg.content = str(response)
    await msg.update()
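The indexing pipeline above can also be exercised without the Chainlit UI. The following is a minimal sketch, assuming the pinned llama_index fork and its dependencies are installed and a MonsterLLM deployment is reachable; the base URL, auth token, and PDF path are placeholders, and indexing() is the helper defined in app.py above.

from llama_index.llms import MonsterLLM
from app import indexing  # reuses the helper from app.py

# Placeholders: point these at a real MonsterAPI LLM deployment
llm = MonsterLLM(
    model="deploy-llm",
    base_url="https://<your-deployment-url>",
    monster_api_key="<your-auth-token>",
    temperature=0.75,
    context_window=1024,
)

# Build a query engine over any local PDF and ask a question
engine = indexing(llm, "example.pdf")
print(engine.query("What is this document about?"))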
requirements.txt
ADDED
monsterapi
git+https://github.com/Vikasqblocks/llama_index.git@f2f04654e9f2cbf1bf765b0d575a6af1f899b18e
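For running outside the container, the dependencies can be installed with pip; note that chainlit itself is not pinned in requirements.txt, so it has to be installed separately before the launch command from the Dockerfile's CMD will work:

pip install -r requirements.txt chainlit
chainlit run app.py --port 7860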