# agent_app/main.py
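"""Gradio chat front end for an HR assistant agent.

The agent is a LangChain OpenAI-tools agent wired to three tools: a RAG document
retrieval tool, a leave-request information tool, and a SQL query tool.
"""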
import gradio as gr
import os
from langchain.schema import HumanMessage, AIMessage
from langchain.agents import AgentExecutor, create_openai_tools_agent
from langchain_openai import ChatOpenAI
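# Local helper modules: rag.py builds the document-retrieval (RAG) tool;
# leave.py provides the leave-request info tool, the SQL agent tool, and the agent prompt.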
from rag import create_rag_tool
from leave import LeaveRequestInfoTool, SQLAgentTool, prompt
# Initialize the LLM. The OpenAI API key must come from the environment
# (e.g. a Space secret) rather than being hard-coded in source.
if "OPENAI_API_KEY" not in os.environ:
    raise RuntimeError("Set the OPENAI_API_KEY environment variable before launching the app.")
llm = ChatOpenAI(model="gpt-4", temperature=0)
# Instantiate the tools available to the agent: RAG retrieval, leave-request info, and SQL queries.
rag_tool = create_rag_tool(llm=llm)
leave_request_info_tool = LeaveRequestInfoTool()
sql_tool = SQLAgentTool()
tools = [leave_request_info_tool, sql_tool, rag_tool]
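# Bind the tools to the LLM via the shared prompt and wrap the agent in an executor.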
agent = create_openai_tools_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
def truncate_chat_history(chat_history, max_tokens=3000):
    """Return a copy of chat_history with the oldest messages dropped until a rough
    token estimate (whitespace-delimited word count) fits within max_tokens."""
    truncated = list(chat_history)
    total_tokens = sum(len(message.content.split()) for message in truncated)
    while total_tokens > max_tokens and truncated:
        truncated.pop(0)
        total_tokens = sum(len(message.content.split()) for message in truncated)
    return truncated
def handle_user_input(user_input, chat_history):
    if chat_history is None:
        chat_history = []
    chat_history.append(HumanMessage(content=user_input))
    # Trim a copy of the history so the agent prompt stays within the context budget;
    # the full history is kept for display.
    truncated_chat_history = truncate_chat_history(chat_history)
    response = agent_executor.invoke(
        {"input": user_input, "chat_history": truncated_chat_history}
    )
    ai_response = response["output"]
    chat_history.append(AIMessage(content=ai_response))
    # gr.Chatbot expects (user_message, bot_message) pairs; the history alternates
    # HumanMessage / AIMessage, so pair consecutive entries.
    messages = [
        (chat_history[i].content, chat_history[i + 1].content)
        for i in range(0, len(chat_history) - 1, 2)
    ]
    return messages, chat_history
with gr.Blocks() as demo:
    gr.Markdown("# HR Assistant Chatbot")
    chatbot = gr.Chatbot()
    state = gr.State()
    txt = gr.Textbox(placeholder="Type your message here...")
    txt.submit(handle_user_input, [txt, state], [chatbot, state])

demo.launch(share=True)
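# To run locally (assuming rag.py and leave.py sit alongside this file and the
# OPENAI_API_KEY environment variable is set):
#   python main.py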