|
import logging |
|
import os |
|
from subprocess import check_call, STDOUT |
|
from time import asctime |
|
|
|
import gradio as gr |
|
from llama_index.core import Document, VectorStoreIndex |
|
|
|
from generate_response import generate_chat_response_with_history, set_llm, is_search_query, condense_question, \ |
|
generate_chat_response_with_history_rag_return_response |
|
from web_search import search |
|
|
|
# Path to an on-disk API key file.
# NOTE(review): unused in this file — __main__ reads the 'gpt_api_key' env var
# instead; confirm whether the file fallback was intended.
API_KEY_PATH = "../keys/gpt_api_key.txt"

# Shared application logger; __main__ attaches a timestamped file handler.
logger = logging.getLogger("agent_logger")

# NOTE(review): the three flags below appear unused in this file — they may be
# read by other modules importing this one; verify before removing.
sourced = False

query = False

rag_similarity = False
|
|
|
|
|
def google_search_chat(message, history):
    """Gradio chat handler that grounds answers in live web-search results.

    Condenses the chat history into a standalone question; if it looks like a
    search query, fetches results, builds an in-memory vector index over their
    text, and streams a RAG-grounded response. Otherwise falls back to a plain
    chat completion over the history.

    Args:
        message: The latest user message (str).
        history: Prior chat turns in Gradio ChatInterface format.

    Yields:
        Progressively longer partial response strings for streaming display.
    """
    condensed_question = condense_question(message, history)

    if not is_search_query(condensed_question):
        # Not a search-style question: plain conversational reply.
        yield from generate_chat_response_with_history(message, history)
        return

    # NOTE(review): both the raw message and the condensed question are
    # passed — confirm `search` expects this argument order.
    search_results = search(message, condensed_question)

    # Build once with join instead of repeated `+` concatenation,
    # which is quadratic in the number of results.
    relevant_content = ''.join(
        '\n' + ''.join(result['text']) for result in search_results
    )
    # NOTE(review): `sources` is collected but never shown or logged —
    # confirm whether it should be appended to the response.
    sources = ''.join(
        f'\n {position}. ' + result['url']
        for position, result in enumerate(search_results, start=1)
    )

    if not relevant_content:
        logger.info('Assistant Response: Sorry, no search results found.')
        yield "Sorry, no search results found."
        return

    documents = [Document(text=relevant_content)]
    # Renamed from `index`, which shadowed the enumerate loop variable above.
    vector_index = VectorStoreIndex.from_documents(documents)

    response = generate_chat_response_with_history_rag_return_response(
        vector_index, message, history)

    # Stream the response: re-join accumulated chunks so each yield is the
    # full text so far, as Gradio expects.
    response_chunks = []
    string_output = ""
    for text in response.response_gen:
        response_chunks.append(text)
        string_output = ''.join(response_chunks)
        yield string_output

    logger.info(f'Assistant Response: {string_output}')
|
|
|
|
|
if __name__ == '__main__':
    logging.root.setLevel(logging.INFO)

    # Timestamped, filesystem-safe log filename,
    # e.g. agent_log_monjan1120000002024.log
    timestamp = asctime().replace(" ", "").lower().replace(":", "")
    filehandler = logging.FileHandler(f'agent_log_{timestamp}.log', 'a')
    formatter = logging.Formatter(
        '%(asctime)-15s::%(levelname)s::%(filename)s::%(funcName)s::%(lineno)d::%(message)s')
    filehandler.setFormatter(formatter)

    logger = logging.getLogger("agent_logger")
    # Drop any stale file handlers so repeated launches (e.g. hot reload)
    # don't write duplicate lines to multiple log files.
    for hdlr in logger.handlers[:]:
        if isinstance(hdlr, logging.FileHandler):
            logger.removeHandler(hdlr)
    logger.addHandler(filehandler)
    logger.setLevel(logging.INFO)

    logging.getLogger('selenium.webdriver.common').setLevel(logging.DEBUG)

    api_key = os.getenv('gpt_api_key')
    if not api_key:
        # Fail fast with a clear message instead of passing None to set_llm
        # and getting an opaque auth error at first request.
        raise RuntimeError(
            "Environment variable 'gpt_api_key' is not set; "
            "export it before launching the app.")

    set_llm(key=api_key, model="gpt-4-0125-preview", temperature=0)

    logger.info("Launching Gradio ChatInterface for searchbot...")

    demo = gr.ChatInterface(fn=google_search_chat,
                            title="Search Assistant",
                            retry_btn=None, undo_btn=None, clear_btn=None,
                            theme="soft")
    # NOTE(review): hard-coded basic-auth credentials — consider moving
    # these to environment variables as well.
    demo.launch(auth=('convo', 'session2024'))
|