theAIguy committed
Commit e7f437a · Parent(s): ae7a494

add: web search tool for fact checking

Files changed (2):
  1. Gradio_UI.py +4 -3
  2. app.py +7 -9
Gradio_UI.py CHANGED
@@ -151,7 +151,8 @@ def stream_to_gradio(
         for message in pull_messages_from_step(
             step_log,
         ):
-            yield message
+            #yield message
+            continue
 
     final_answer = step_log  # Last log is the run's final_answer
     final_answer = handle_agent_output_types(final_answer)
@@ -159,7 +160,7 @@ def stream_to_gradio(
     if isinstance(final_answer, AgentText):
         yield gr.ChatMessage(
             role="assistant",
-            content=f"**Final answer:**\n{final_answer.to_string()}\n",
+            content=f"**Final verdict:**\n{final_answer.to_string()}\n",
         )
     elif isinstance(final_answer, AgentImage):
         yield gr.ChatMessage(
@@ -172,7 +173,7 @@ def stream_to_gradio(
             content={"path": final_answer.to_string(), "mime_type": "audio/wav"},
         )
     else:
-        yield gr.ChatMessage(role="assistant", content=f"**Final answer:** {str(final_answer)}")
+        yield gr.ChatMessage(role="assistant", content=f"**Final verdict:** {str(final_answer)}")
 
 
 class GradioUI:
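
For context, the effect of this change is that intermediate step logs are no longer streamed to the chat; only the run's final message is emitted, now labelled "Final verdict". Below is a minimal, hypothetical sketch of the message the AgentText branch produces after this commit; the helper name final_verdict_message is illustrative only and is not part of the repo.

import gradio as gr

def final_verdict_message(answer_text: str) -> gr.ChatMessage:
    # Mirrors the AgentText branch of stream_to_gradio after this commit;
    # this helper is hypothetical and exists only for illustration.
    return gr.ChatMessage(
        role="assistant",
        content=f"**Final verdict:**\n{answer_text}\n",
    )

print(final_verdict_message("The claim is supported by the retrieved sources."))
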
app.py CHANGED
@@ -9,14 +9,13 @@ from Gradio_UI import GradioUI
 
 # Below is an example of a tool that does nothing. Amaze us with your creativity !
 @tool
-def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
-    #Keep this format for the description / args / args description but feel free to modify the tool
-    """A tool that does nothing yet
+def web_search(query:str)-> str:
+    """A tool that searches a given query over the internet and brings top-10 results.
     Args:
-        arg1: the first argument
-        arg2: the second argument
+        query: query to search over the internet
     """
-    return "What magic will you build ?"
+    tool = DuckDuckGoSearchTool(max_results=10)
+    return tool(query)
 
 @tool
 def get_current_time_in_timezone(timezone: str) -> str:
@@ -46,16 +45,15 @@ model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may
 custom_role_conversions=None,
 )
 
-
 # Import tool from Hub
-image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
+# image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
 
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
 
 agent = CodeAgent(
     model=model,
-    tools=[final_answer], ## add your tools here (don't remove final answer)
+    tools=[final_answer, web_search], ## add your tools here (don't remove final answer)
     max_steps=6,
     verbosity_level=1,
     grammar=None,
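
For reference, the new tool can also be exercised on its own, outside the CodeAgent loop. A minimal sketch, assuming smolagents (which provides DuckDuckGoSearchTool and the @tool decorator) and its duckduckgo-search backend are installed; the smoke-test query string is made up for illustration.

from smolagents import DuckDuckGoSearchTool, tool

@tool
def web_search(query: str) -> str:
    """A tool that searches a given query over the internet and brings top-10 results.
    Args:
        query: query to search over the internet
    """
    search = DuckDuckGoSearchTool(max_results=10)  # same top-10 limit as in the commit
    return search(query)

if __name__ == "__main__":
    # Quick standalone check (requires network access); the query is illustrative.
    print(web_search("Eiffel Tower height in meters"))
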