Merge branch 'rolling' into optimise-releases

Files changed:
- public/static/index.js  +1 -1
- src/config_parser/parser.rs  +1 -1
- src/engines/duckduckgo.rs  +16 -6
- src/engines/engine_models.rs  +87 -0
- src/engines/mod.rs  +1 -0
- src/engines/searx.rs  +18 -5
public/static/index.js
CHANGED

@@ -4,7 +4,7 @@ function search_web() {
 }
 
 search_box.addEventListener('keyup', (e) => {
-    if (e.
+    if (e.key === 'Enter') {
         search_web()
     }
 })
src/config_parser/parser.rs
CHANGED

@@ -118,7 +118,7 @@ impl Config {
         {
             Ok("./websurfx/config.lua".to_string())
         } else {
-            Err(
+            Err("Config file not found!!".to_string().into())
         }
     }
 }
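Note: the new `Err` arm compiles because the standard library provides `From<String>` for `Box<dyn std::error::Error>`, so `.to_string().into()` yields a boxed error. A minimal standalone sketch of the same conversion (the `config_path` function is hypothetical; the real signature is truncated in the diff above and is only assumed to return `Result<String, Box<dyn Error>>`):

use std::error::Error;

// Hypothetical stand-in for the config-lookup logic; only the `Err(...)`
// conversion mirrors the diff.
fn config_path(exists: bool) -> Result<String, Box<dyn Error>> {
    if exists {
        Ok("./websurfx/config.lua".to_string())
    } else {
        // `String` converts into `Box<dyn Error>` via the std `From<String>` impl.
        Err("Config file not found!!".to_string().into())
    }
}

fn main() {
    if let Err(e) = config_path(false) {
        eprintln!("error: {e}"); // prints: error: Config file not found!!
    }
}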
src/engines/duckduckgo.rs
CHANGED

@@ -2,13 +2,15 @@
 //! by querying the upstream duckduckgo search engine with user provided query and with a page
 //! number if provided.
 
-use std::collections::HashMap;
+use std::{collections::HashMap, time::Duration};
 
 use reqwest::header::{HeaderMap, CONTENT_TYPE, COOKIE, REFERER, USER_AGENT};
 use scraper::{Html, Selector};
 
 use crate::search_results_handler::aggregation_models::RawSearchResult;
 
+use super::engine_models::EngineErrorKind;
+
 /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
 /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
 /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
@@ -22,14 +24,15 @@ use crate::search_results_handler::aggregation_models::RawSearchResult;
 ///
 /// # Errors
 ///
-/// Returns
-/// reach the above `upstream search engine` page
-///
+/// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to
+/// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+/// provide results for the requested search query and also returns error if the scraping selector
+/// or HeaderMap fails to initialize.
 pub async fn results(
     query: &str,
     page: u32,
     user_agent: &str,
-) -> Result<HashMap<String, RawSearchResult>,
+) -> Result<HashMap<String, RawSearchResult>, EngineErrorKind> {
     // Page number can be missing or empty string and so appropriate handling is required
     // so that upstream server recieves valid page number.
     let url: String = match page {
@@ -54,9 +57,9 @@ pub async fn results(
     header_map.insert(COOKIE, "kl=wt-wt".parse()?);
 
     // fetch the html from upstream duckduckgo engine
-    // TODO: Write better error handling code to handle no results case.
     let results: String = reqwest::Client::new()
         .get(url)
+        .timeout(Duration::from_secs(30))
         .headers(header_map) // add spoofed headers to emulate human behaviour
         .send()
         .await?
@@ -64,6 +67,13 @@ pub async fn results(
         .await?;
 
     let document: Html = Html::parse_document(&results);
+
+    let no_result: Selector = Selector::parse(".no-results")?;
+
+    if document.select(&no_result).next().is_some() {
+        return Err(EngineErrorKind::EmptyResultSet);
+    }
+
     let results: Selector = Selector::parse(".result")?;
     let result_title: Selector = Selector::parse(".result__a")?;
     let result_url: Selector = Selector::parse(".result__url")?;
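Note: the two functional changes here are a 30-second request timeout on the reqwest client and an early `EmptyResultSet` return when DuckDuckGo renders a `.no-results` element. A minimal sketch of the no-results check in isolation, run against a canned HTML snippet rather than a live response (the `has_no_results` helper is hypothetical; the selector is the one the diff uses):

use scraper::{Html, Selector};

// Hypothetical helper mirroring the diff's no-results check: any element
// matching `.no-results` is taken to mean the engine reported zero results.
fn has_no_results(html: &str) -> Result<bool, scraper::error::SelectorErrorKind<'static>> {
    let document = Html::parse_document(html);
    let no_result = Selector::parse(".no-results")?;
    // `next().is_some()` matches the diff: the first hit is enough.
    Ok(document.select(&no_result).next().is_some())
}

fn main() {
    let page = r#"<html><body><div class="no-results">No results.</div></body></html>"#;
    assert!(has_no_results(page).unwrap());
}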
src/engines/engine_models.rs
ADDED

@@ -0,0 +1,87 @@
+//! This module provides the error enum to handle different errors associated while requesting data from
+//! the upstream search engines with the search query provided by the user.
+
+use reqwest::header::InvalidHeaderValue;
+use scraper::error::SelectorErrorKind;
+
+/// A custom error type used for handle engine associated errors.
+///
+/// This enum provides variants three different categories of errors:
+/// * `RequestError` - This variant handles all request related errors like forbidden, not found,
+/// etc.
+/// * `EmptyResultSet` - This variant handles the not results found error provide by the upstream
+/// search engines.
+/// * `UnexpectedError` - This variant handles all the errors which are unexpected or occur rarely
+/// and are errors mostly related to failure in initialization of HeaderMap, Selector errors and
+/// all other errors occuring within the code handling the `upstream search engines`.
+#[derive(Debug)]
+pub enum EngineErrorKind {
+    RequestError(reqwest::Error),
+    EmptyResultSet,
+    UnexpectedError {
+        message: String,
+        source: Option<Box<dyn std::error::Error>>,
+    },
+}
+
+/// Implementing `Display` trait to make errors writable on the stdout and also providing/passing the
+/// appropriate errors that should be written to the stdout when this error is raised/encountered.
+impl std::fmt::Display for EngineErrorKind {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            EngineErrorKind::RequestError(request_error) => {
+                write!(f, "Request error: {}", request_error)
+            }
+            EngineErrorKind::EmptyResultSet => {
+                write!(f, "The upstream search engine returned an empty result set")
+            }
+            EngineErrorKind::UnexpectedError { message, source } => {
+                write!(f, "Unexpected error: {}", message)?;
+                if let Some(source) = source {
+                    write!(f, "\nCaused by: {}", source)?;
+                }
+                Ok(())
+            }
+        }
+    }
+}
+
+/// Implementing `Error` trait to make the the `EngineErrorKind` enum an error type and
+/// mapping `ReqwestErrors` to `RequestError` and `UnexpectedError` errors to all other unexpected
+/// errors ocurring within the code handling the upstream search engines.
+impl std::error::Error for EngineErrorKind {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        match self {
+            EngineErrorKind::RequestError(request_error) => Some(request_error),
+            EngineErrorKind::UnexpectedError { source, .. } => source.as_deref().map(|s| s),
+            _ => None,
+        }
+    }
+}
+
+/// Implementing `From` trait to map the `SelectorErrorKind` to `UnexpectedError` variant.
+impl From<SelectorErrorKind<'_>> for EngineErrorKind {
+    fn from(err: SelectorErrorKind<'_>) -> Self {
+        Self::UnexpectedError {
+            message: err.to_string(),
+            source: None,
+        }
+    }
+}
+
+/// Implementing `From` trait to map the `InvalidHeaderValue` to `UnexpectedError` variant.
+impl From<InvalidHeaderValue> for EngineErrorKind {
+    fn from(err: InvalidHeaderValue) -> Self {
+        Self::UnexpectedError {
+            message: err.to_string(),
+            source: Some(Box::new(err)),
+        }
+    }
+}
+
+/// Implementing `From` trait to map all `reqwest::Error` to `UnexpectedError` variant.
+impl From<reqwest::Error> for EngineErrorKind {
+    fn from(err: reqwest::Error) -> Self {
+        Self::RequestError(err)
+    }
+}
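Note: the `From` impls above are what let the engine code use `?` directly on `reqwest` and `scraper` errors, as seen in the duckduckgo.rs hunks. A sketch of how a caller inside the crate might consume the error (the `handle_search` function is hypothetical; `duckduckgo::results` and the variants come from this diff, and the module path assumes `engines` is declared at the crate root):

use crate::engines::{duckduckgo, engine_models::EngineErrorKind};

// Hypothetical consumer branching on the three error categories.
async fn handle_search(query: &str, user_agent: &str) {
    match duckduckgo::results(query, 1, user_agent).await {
        Ok(results) => println!("{} results aggregated", results.len()),
        // A missing result page is recoverable: report "no results" to the user.
        Err(EngineErrorKind::EmptyResultSet) => println!("no results found"),
        // Network-level failures arrive via the `From<reqwest::Error>` impl.
        Err(EngineErrorKind::RequestError(e)) => eprintln!("request failed: {e}"),
        // Selector/header initialization problems land in `UnexpectedError`.
        Err(e @ EngineErrorKind::UnexpectedError { .. }) => eprintln!("{e}"),
    }
}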
src/engines/mod.rs
CHANGED

@@ -1,2 +1,3 @@
 pub mod duckduckgo;
+pub mod engine_models;
 pub mod searx;
src/engines/searx.rs
CHANGED

@@ -8,6 +8,8 @@ use std::collections::HashMap;
 
 use crate::search_results_handler::aggregation_models::RawSearchResult;
 
+use super::engine_models::EngineErrorKind;
+
 /// This function scrapes results from the upstream engine duckduckgo and puts all the scraped
 /// results like title, visiting_url (href in html),engine (from which engine it was fetched from)
 /// and description in a RawSearchResult and then adds that to HashMap whose keys are url and
@@ -21,14 +23,15 @@ use crate::search_results_handler::aggregation_models::RawSearchResult;
 ///
 /// # Errors
 ///
-/// Returns
-/// reach the above `upstream search engine` page
-///
+/// Returns an `EngineErrorKind` if the user is not connected to the internet or if their is failure to
+/// reach the above `upstream search engine` page or if the `upstream search engine` is unable to
+/// provide results for the requested search query and also returns error if the scraping selector
+/// or HeaderMap fails to initialize.
 pub async fn results(
     query: &str,
     page: u32,
     user_agent: &str,
-) -> Result<HashMap<String, RawSearchResult>,
+) -> Result<HashMap<String, RawSearchResult>, EngineErrorKind> {
     // Page number can be missing or empty string and so appropriate handling is required
     // so that upstream server recieves valid page number.
     let url: String = format!("https://searx.work/search?q={query}&pageno={page}");
@@ -41,7 +44,6 @@ pub async fn results(
     header_map.insert(COOKIE, "categories=general; language=auto; locale=en; autocomplete=duckduckgo; image_proxy=1; method=POST; safesearch=2; theme=simple; results_on_new_tab=1; doi_resolver=oadoi.org; simple_style=auto; center_alignment=1; query_in_title=1; infinite_scroll=0; disabled_engines=; enabled_engines=\"archive is__general\\054yep__general\\054curlie__general\\054currency__general\\054ddg definitions__general\\054wikidata__general\\054duckduckgo__general\\054tineye__general\\054lingva__general\\054startpage__general\\054yahoo__general\\054wiby__general\\054marginalia__general\\054alexandria__general\\054wikibooks__general\\054wikiquote__general\\054wikisource__general\\054wikiversity__general\\054wikivoyage__general\\054dictzone__general\\054seznam__general\\054mojeek__general\\054naver__general\\054wikimini__general\\054brave__general\\054petalsearch__general\\054goo__general\"; disabled_plugins=; enabled_plugins=\"searx.plugins.hostname_replace\\054searx.plugins.oa_doi_rewrite\\054searx.plugins.vim_hotkeys\"; tokens=; maintab=on; enginetab=on".parse()?);
 
     // fetch the html from upstream searx instance engine
-    // TODO: Write better error handling code to handle no results case.
     let results: String = reqwest::Client::new()
         .get(url)
         .headers(header_map) // add spoofed headers to emulate human behaviours.
@@ -51,6 +53,17 @@ pub async fn results(
         .await?;
 
     let document: Html = Html::parse_document(&results);
+
+    let no_result: Selector = Selector::parse("#urls>.dialog-error>p")?;
+
+    if let Some(no_result_msg) = document.select(&no_result).nth(1) {
+        if no_result_msg.inner_html()
+            == "we didn't find any results. Please use another query or search in more categories"
+        {
+            return Err(EngineErrorKind::EmptyResultSet);
+        }
+    }
+
     let results: Selector = Selector::parse(".result")?;
     let result_title: Selector = Selector::parse("h3>a")?;
     let result_url: Selector = Selector::parse("h3>a")?;
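Note: unlike the duckduckgo check, the searx check keys off the second `<p>` under `#urls>.dialog-error` (hence `nth(1)`) and compares its `inner_html` against the exact message string. A standalone sketch against a canned fragment shaped like that dialog (the markup here is illustrative, not captured from a real searx page):

use scraper::{Html, Selector};

fn main() {
    // Illustrative stand-in for a searx "no results" error dialog.
    let html = r#"
        <div id="urls">
          <div class="dialog-error">
            <p>Error!</p>
            <p>we didn't find any results. Please use another query or search in more categories</p>
          </div>
        </div>"#;
    let document = Html::parse_document(html);
    let no_result = Selector::parse("#urls>.dialog-error>p").unwrap();
    // `nth(1)` picks the second matching <p>, which carries the message text.
    if let Some(msg) = document.select(&no_result).nth(1) {
        assert!(msg.inner_html().contains("didn't find any results"));
    }
}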