"""Gradio chat front end for an HR assistant agent built on LangChain tools."""
import os

import gradio as gr
from langchain.agents import AgentExecutor, create_openai_tools_agent
from langchain.schema import AIMessage, HumanMessage
from langchain_openai import ChatOpenAI

from leave import LeaveRequestInfoTool, SQLAgentTool, prompt
from rag import create_rag_tool


# Initialize LLM
# NOTE: supply the API key via the environment; never commit real keys to source.
key = os.environ.get("OPENAI_API_KEY", "<your-openai-api-key>")
os.environ["OPENAI_API_KEY"] = key

llm = ChatOpenAI(model="gpt-4", temperature=0)

# Build the tools the agent can call: leave-request info, SQL queries, and RAG retrieval
rag_tool = create_rag_tool(llm=llm)
leave_request_info_tool = LeaveRequestInfoTool()
sql_tool = SQLAgentTool()

tools = [leave_request_info_tool, sql_tool, rag_tool]

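# Wrap the tools in an OpenAI-tools agent and an executor that runs it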
agent = create_openai_tools_agent(llm, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

def truncate_chat_history(chat_history, max_tokens=3000):
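    """Drop the oldest messages until the history fits within max_tokens.

    Token counts are approximated by whitespace-separated word counts, a rough
    proxy rather than a true tokenizer count.
    """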
    total_tokens = sum(len(message.content.split()) for message in chat_history)
    while total_tokens > max_tokens and chat_history:
        chat_history.pop(0)
        total_tokens = sum(len(message.content.split()) for message in chat_history)
    return chat_history

def handle_user_input(user_input, chat_history):
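    """Append the user message, run the agent, and return Chatbot-ready message pairs."""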
    if chat_history is None:
        chat_history = []
    
    chat_history.append(HumanMessage(content=user_input))
    truncated_chat_history = truncate_chat_history(chat_history)
    
    response = agent_executor.invoke(
        {"input": user_input, "chat_history": truncated_chat_history}
    )
    ai_response = response['output']
    
    chat_history.append(AIMessage(content=ai_response))
    
    # gr.Chatbot expects (user_message, bot_message) pairs; the history alternates Human/AI.
    messages = [(h.content, a.content) for h, a in zip(chat_history[::2], chat_history[1::2])]
    return messages, chat_history

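# Gradio UI: a single-textbox chat interface backed by the agent executor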
with gr.Blocks() as demo:
    gr.Markdown("# HR Assistant Chatbot")
    chatbot = gr.Chatbot()
    state = gr.State()
    txt = gr.Textbox(placeholder="Type your message here...")

    txt.submit(handle_user_input, [txt, state], [chatbot, state])

demo.launch(share=True)