neon_arch committed
Commit a3edf70
Parent: 3aeb3b3

✨ feat: add code to pass engine errors to the UI

src/search_results_handler/aggregator.rs CHANGED
@@ -8,7 +8,7 @@ use rand::Rng;
 use tokio::task::JoinHandle;
 
 use super::{
-    aggregation_models::{RawSearchResult, SearchResult, SearchResults},
+    aggregation_models::{EngineErrorInfo, RawSearchResult, SearchResult, SearchResults},
     user_agent::random_user_agent,
 };
 
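The newly imported EngineErrorInfo type lives in aggregation_models and its definition is not part of this diff. Judging from how it is constructed further down (from a &EngineError plus the engine's name), a minimal sketch could look like the following; the import path, field names, and Display-based conversion are assumptions, not the crate's actual code.

// Hypothetical sketch only; the real EngineErrorInfo is defined in
// aggregation_models and may differ in field names and conversion logic.
use crate::engines::engine_models::EngineError; // import path assumed

#[derive(Clone)]
pub struct EngineErrorInfo {
    /// User-facing description of what went wrong, derived from the EngineError.
    pub error: String,
    /// Name of the upstream engine that produced the error.
    pub engine: String,
}

impl EngineErrorInfo {
    pub fn new(error: &EngineError, engine: String) -> Self {
        Self {
            // EngineError is used as an error-stack Context, so it implements Display.
            error: error.to_string(),
            engine,
        }
    }
}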
@@ -18,6 +18,8 @@ use crate::engines::{
     searx,
 };
 
+type FutureVec = Vec<JoinHandle<Result<HashMap<String, RawSearchResult>, Report<EngineError>>>>;
+
 /// A function that aggregates all the scraped results from the above upstream engines and
 /// then removes duplicate results and if two results are found to be from two or more engines
 /// then puts their names together to show the results are fetched from these upstream engines
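The FutureVec alias simply names the rather long Vec<JoinHandle<...>> type so the spawn-and-collect pipeline below can keep a short annotation. A self-contained illustration of the same pattern, with illustrative names only and assuming tokio with the rt and macros features enabled:

use tokio::task::JoinHandle;

// An alias keeps the collected spawn handles readable at the use site.
type Handles = Vec<JoinHandle<u32>>;

#[tokio::main]
async fn main() {
    // Spawn one task per input and collect the join handles, as the aggregator does.
    let handles: Handles = (0..3u32).map(|n| tokio::spawn(async move { n * 2 })).collect();

    // Awaiting a JoinHandle yields Result<T, JoinError>, mirroring `task.await` below.
    for handle in handles {
        println!("{}", handle.await.unwrap());
    }
}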
@@ -70,49 +72,48 @@ pub async fn aggregate(
 
     let task_capacity: usize = search_engines.len();
 
-    let tasks: Vec<JoinHandle<Result<HashMap<String, RawSearchResult>, Report<EngineError>>>> =
-        search_engines
-            .into_iter()
-            .map(|search_engine| {
-                let query: String = query.clone();
-                let user_agent: String = user_agent.clone();
-                tokio::spawn(
-                    async move { search_engine.results(query, page, user_agent.clone()).await },
-                )
-            })
-            .collect();
+    let tasks: FutureVec = search_engines
+        .into_iter()
+        .map(|search_engine| {
+            let query: String = query.clone();
+            let user_agent: String = user_agent.clone();
+            tokio::spawn(
+                async move { search_engine.results(query, page, user_agent.clone()).await },
+            )
+        })
+        .collect();
 
     let mut outputs = Vec::with_capacity(task_capacity);
 
     for task in tasks {
         if let Ok(result) = task.await {
-            outputs.push(result.ok())
+            outputs.push(result)
         }
     }
 
+    let mut engine_errors_info: Vec<EngineErrorInfo> = Vec::new();
+
     let mut initial: bool = true;
     let mut counter: usize = 0;
     outputs.iter().for_each(|results| {
         if initial {
             match results {
-                Some(result) => {
+                Ok(result) => {
                     result_map.extend(result.clone());
                     counter += 1;
                     initial = false
                 }
-                None => {
-                    if debug {
-                        log::error!(
-                            "Error fetching results from {}",
-                            upstream_search_engines[counter]
-                        );
-                    };
+                Err(error_type) => {
+                    engine_errors_info.push(EngineErrorInfo::new(
+                        error_type.downcast_ref::<EngineError>().unwrap(),
+                        upstream_search_engines[counter].clone(),
+                    ));
                     counter += 1
                 }
             }
         } else {
             match results {
-                Some(result) => {
+                Ok(result) => {
                     result.clone().into_iter().for_each(|(key, value)| {
                         result_map
                             .entry(key)
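Each failed task now carries an error-stack Report<EngineError> instead of being flattened to None, and the aggregator recovers the concrete error with Report::downcast_ref::<EngineError>(), which returns an Option<&T>; the unwrap() is safe as long as the report's context really is an EngineError. A small stand-alone sketch of that pattern with a stand-in error type (not the crate's real EngineError):

use std::fmt;

use error_stack::Report;

// Stand-in for the crate's EngineError, just to demonstrate the downcast.
#[derive(Debug)]
struct DemoError;

impl fmt::Display for DemoError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("demo engine error")
    }
}

impl error_stack::Context for DemoError {}

fn main() {
    let report: Report<DemoError> = Report::new(DemoError);

    // downcast_ref returns Option<&DemoError>; it is Some here because DemoError
    // is the report's context, which is what makes the unwrap in the diff safe.
    let inner: &DemoError = report.downcast_ref::<DemoError>().unwrap();
    println!("recovered: {inner}");
}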
@@ -130,13 +131,11 @@ pub async fn aggregate(
                     });
                     counter += 1
                 }
-                None => {
-                    if debug {
-                        log::error!(
-                            "Error fetching results from {}",
-                            upstream_search_engines[counter]
-                        );
-                    };
+                Err(error_type) => {
+                    engine_errors_info.push(EngineErrorInfo::new(
+                        error_type.downcast_ref::<EngineError>().unwrap(),
+                        upstream_search_engines[counter].clone(),
+                    ));
                     counter += 1
                 }
             }
@@ -157,5 +156,6 @@ pub async fn aggregate(
             })
             .collect(),
         query.to_string(),
+        engine_errors_info,
     ))
 }
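The net effect is that SearchResults::new now receives the collected engine_errors_info as an extra argument, so the UI layer can show which upstream engines failed instead of that information only appearing in the server log when debug was enabled. The shapes below are a hypothetical sketch of how a caller might consume the new field; the real SearchResults definition in aggregation_models and the route handlers may differ.

// Hypothetical shapes, for illustration only (field names assumed).
pub struct SearchResults {
    pub results: Vec<SearchResult>,
    pub page_query: String,
    pub engine_errors_info: Vec<EngineErrorInfo>,
}

impl SearchResults {
    pub fn new(
        results: Vec<SearchResult>,
        page_query: String,
        engine_errors_info: Vec<EngineErrorInfo>,
    ) -> Self {
        Self {
            results,
            page_query,
            engine_errors_info,
        }
    }
}

// A caller (e.g. the template layer) could then surface the failures:
fn list_failed_engines(results: &SearchResults) -> Vec<String> {
    results
        .engine_errors_info
        .iter()
        .map(|info| format!("{} failed: {}", info.engine, info.error))
        .collect()
}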