arabellastrange committed
Commit 6373c5e · 1 Parent(s): a8c00ab

adding sources

Files changed (1)
  1. app.py +2 -39
app.py CHANGED
@@ -11,9 +11,6 @@ from web_search import search
 
 API_KEY_PATH = "../keys/gpt_api_key.txt"
 logger = logging.getLogger("agent_logger")
-sourced = False
-query = False
-rag_similarity = False
 
 
 def google_search_chat(message, history):
@@ -33,24 +30,6 @@ def google_search_chat(message, history):
             print('Search results vectorized...')
             response = generate_chat_response_with_history_rag_return_response(index, message, history)
 
-            # similar_str = "not calculated"
-            # faithfulness_str = "not calculated"
-            #
-            # if rag_similarity:
-            #     sim_evaluator = SemanticSimilarityEvaluator()
-            #     faith_evaluator = FaithfulnessEvaluator(llm=get_llm())
-            #     # condensed_context = condense_context(relevant_content)
-            #     # logger.info("Calculating similarity...")
-            #     # similar = sim_evaluator.evaluate(response=str(response),
-            #     #                                  reference=condensed_context)
-            #     logger.info("Calculating faithfulness...")
-            #     faithfulness = faith_evaluator.evaluate_response(query=condensed_question, response=response)
-            #     # similar_str = str(round((similar.score * 100), 2)) + "%"
-            #     faithfulness_str = "Yes" if faithfulness.passing else "No"
-            #
-            #     logger.info(f'**Search Query:** {condensed_question} \n **Faithfulness:** {faithfulness_str} \n '
-            #                 f'**Similarity:** {similar_str} \n **Sources used:** \n {sources}')
-
             response_text = []
             string_output = ""
 
@@ -58,29 +37,13 @@ def google_search_chat(message, history):
                 response_text.append(text)
                 string_output = ''.join(response_text)
                 yield string_output
-
-            # if not sourced:
-            #     pass
-            # if sourced and not query and not rag_similarity:
-            #     yield string_output + f'\n\n --- \n **Sources used:** \n {sources}'
-            # if sourced and query and not rag_similarity:
-            #     yield (string_output
-            #            + f'\n\n --- \n **Search Query:** {condensed_question} '
-            #              f'\n **Sources used:** \n {sources}')
-            # if rag_similarity:
-            #     yield (string_output
-            #            + f'\n\n --- \n **Search Query:** {condensed_question} \n '
-            #              # f'**Similarity of response to the sources [ℹ️]'
-            #              # f'(https://en.wikipedia.org/wiki/Semantic_similarity):** {similar_str} \n'
-            #              f'**Is response in source documents?**: {faithfulness_str}'
-            #              f'\n **Sources used:** \n {sources}')
+            yield string_output + f'\n\n --- \n **Sources used:** \n {sources}'
 
             print(f'Assistant Response: {string_output}')
         else:
             print(
                 f'Assistant Response: Sorry, no search results found.')
             yield "Sorry, no search results found."
-
     else:
         yield from generate_chat_response_with_history(message, history)
 
@@ -105,7 +68,7 @@ if __name__ == '__main__':
     # https://openai.com/blog/new-embedding-models-and-api-updates
     set_llm(key=api_key, model="gpt-4-0125-preview", temperature=0)
 
-    print("Launching Gradio ChatInterface for searchbot...")
+    print("Launching Gradio ChatInterface for searchbot_sourced...")
 
     demo = gr.ChatInterface(fn=google_search_chat,
                             title="Search Assistant", retry_btn=None, undo_btn=None, clear_btn=None,
 