import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
from monsterapi import client as mclient
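
# NOTE: the import paths above imply an older (pre-0.10) llama-index release;
# assumed install (unpinned -- adjust to whatever this Space was built against):
#   pip install chainlit llama-index monsterapi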

def indexing(llm, path):
    # Build a vector index over the uploaded PDF and keep a query engine in the session.
    documents = SimpleDirectoryReader(input_files=[path]).load_data()
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
    )
    index = VectorStoreIndex.from_documents(
        documents, service_context=service_context, use_async=True
    )
    query_engine = index.as_query_engine()
    cl.user_session.set("engine", query_engine)

def qa(sp, message):
    # Prepend the system prompt (if any) to the user's question and query the index.
    engine = cl.user_session.get("engine")
    question = sp + " " + message.content
    response = engine.query(question)
    return response

@cl.on_chat_start
async def factory():
    # Ask for the MonsterAPI deployment URL and trim anything after the domain.
    url = await cl.AskUserMessage(author="Beast", content="Enter URL of your deployment").send()
    index_ai = url['output'].find(".monsterapi.ai")
    url_ai = url['output'][:index_ai + len(".monsterapi.ai")]
    auth = await cl.AskUserMessage(author="Beast", content="Enter auth-token of your deployment").send()
    model = 'deploy-llm'
    llm = MonsterLLM(
        model=model,
        base_url=url_ai,
        monster_api_key=auth['output'],
        temperature=0.75,
        context_window=1024,
    )
    service_client = mclient(api_key=auth['output'], base_url=url_ai)
    cl.user_session.set("service_client", service_client)
    cl.user_session.set("llm", llm)
    res = await cl.AskActionMessage(
        author="Beast",
        content="Do you want to enter a system prompt?",
        actions=[
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()
    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast", content="Enter system prompt").send()
        await cl.Message(author="Beast", content="Noted. Go ahead with your questions!!").send()
        cl.user_session.set("sp", sp["output"])
    else:
        await cl.Message(author="Beast", content="Okay, then you can start asking your questions!!").send()
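
# At this point the session holds "service_client", "llm", and (optionally) "sp";
# the "engine" key only appears once indexing() has run on the first PDF upload.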

@cl.on_message
async def main(message: cl.Message):
    service_client = cl.user_session.get("service_client")
    engine = cl.user_session.get("engine")
    llm = cl.user_session.get("llm")
    sp = cl.user_session.get("sp")
    if sp is None:
        sp = ""
    comp = None  # Holds the answer to the "without RAG?" follow-up, if it gets asked.
    if message.elements:
        go = True
        for file in message.elements:
            if "pdf" in file.mime:
                pdf = file
            else:
                await cl.Message(author="Beast", content="We only support PDF for now").send()
                go = False
                break
        if go:
            msg = cl.Message(author="Beast", content=f"Processing `{pdf.name}`...")
            await msg.send()
            # Indexing blocks, so run it in a worker thread to keep the event loop free.
            await cl.make_async(indexing)(llm, pdf.path)
            msg.content = f"`{pdf.name}` processed."
            await msg.update()
            msg = cl.Message(author="Beast", content="Generating Response...")
            await msg.send()
            response = await cl.make_async(qa)(sp, message)
            print(response)
            msg.content = str(response)
            await msg.update()
            comp = await cl.AskActionMessage(
                author="Beast",
                content="Do you want the answer without RAG?",
                actions=[
                    cl.Action(name="yes", value="yes", label="✅ Yes"),
                    cl.Action(name="no", value="no", label="❌ No"),
                ],
            ).send()
    elif engine is not None:
        # No new upload, but an index already exists: answer from it.
        msg = cl.Message(author="Beast", content="Generating Response...")
        await msg.send()
        response = await cl.make_async(qa)(sp, message)
        print(response)
        msg.content = str(response)
        await msg.update()
        comp = await cl.AskActionMessage(
            author="Beast",
            content="Do you want the answer without RAG?",
            actions=[
                cl.Action(name="yes", value="yes", label="✅ Yes"),
                cl.Action(name="no", value="no", label="❌ No"),
            ],
        ).send()
    if (not message.elements and engine is None) or (comp and comp.get("value") == "yes"):
        # Plain completion straight from the deployed LLM, bypassing the index.
        msg = cl.Message(author="Beast", content="Generating Response...")
        await msg.send()
        payload = {
            "input_variables": {"system": sp, "prompt": message.content},
            "stream": False,
            "temperature": 0.6,
            "max_tokens": 512,
        }
        output = service_client.generate(model="deploy-llm", data=payload)
        msg.content = str(output['text'][0])
        await msg.update()
    else:
        # Catch-all for anything unexpected (or a "no" to the follow-up).
        await cl.Message(author="Beast", content="Broken ;(").send()
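
# To run locally (assuming this file is saved as app.py):
#   chainlit run app.py -w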