//! This module provides the functionality to handle different routes of the `websurfx`
//! meta search engine website and provide an appropriate response to each route/page
//! when requested.
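//!
//! # Example
//!
//! A minimal sketch of how these handlers might be wired into an
//! `actix_web::App`; the exact registration lives in the crate's `main`,
//! so the `routes::` paths below are illustrative:
//!
//! ```ignore
//! use actix_web::{web, App};
//!
//! // Hypothetical wiring; mirror whatever main.rs actually does.
//! let app = App::new()
//!     .service(routes::index)
//!     .service(routes::search)
//!     .service(routes::about)
//!     .service(routes::settings)
//!     .service(routes::robots_data)
//!     .default_service(web::route().to(routes::not_found));
//! ```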

use std::fs::read_to_string;

use crate::{
    cache::cacher::RedisCache,
    config_parser::parser::Config,
    search_results_handler::{aggregation_models::SearchResults, aggregator::aggregate},
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use serde::Deserialize;

/// A named struct which deserializes all the user provided search parameters and stores them.
///
/// # Fields
///
/// * `q` - It stores the search parameter option `q` (or query, in simple words)
/// of the search URL.
/// * `page` - It stores the search parameter `page` (or page number, in simple words)
/// of the search URL.
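///
/// # Example
///
/// Both fields are optional, so a bare `q` (or neither parameter) also
/// deserializes cleanly. A sketch of the parsed value for `?q=rust&page=2`:
///
/// ```ignore
/// SearchParams { q: Some("rust".to_string()), page: Some(2) }
/// ```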
#[derive(Debug, Deserialize)]
struct SearchParams {
    q: Option<String>,
    page: Option<u32>,
}

/// Handles the route of index page or main page of the `websurfx` meta search engine website.
#[get("/")]
pub async fn index(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("index", &config.style).unwrap();
    Ok(HttpResponse::Ok().body(page_content))
}

/// Handles any other accessed route/page that the website does not provide,
/// essentially serving the 404 error page.
pub async fn not_found(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("404", &config.style)?;

    Ok(HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(page_content))
}

/// Handles the route of the search page of the `websurfx` meta search engine website,
/// which takes two search URL parameters `q` and `page`, where the `page` parameter
/// is optional.
///
/// # Example
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden&page=1"
/// ```
///
/// Or
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden"
/// ```
#[get("/search")]
pub async fn search(
    hbs: web::Data<Handlebars<'_>>,
    req: HttpRequest,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let params = web::Query::<SearchParams>::from_query(req.query_string())?;

    // Initialize the redis cache connection struct.
    let redis_cache = RedisCache::new(config.redis_connection_url.clone());
    match &params.q {
        Some(query) => {
            if query.trim().is_empty() {
                Ok(HttpResponse::Found()
                    .insert_header(("location", "/"))
                    .finish())
            } else {
                // Check whether the requested page is a valid page number; if
                // not, fall back to the first page. The page_url constructed
                // here also serves as the key for the results cache below.
                let (page, page_url): (u32, String) = match params.page {
                    Some(page_number) if page_number > 1 => (
                        page_number,
                        format!(
                            "http://{}:{}/search?q={}&page={}",
                            config.binding_ip_addr, config.port, query, page_number
                        ),
                    ),
                    Some(_) => (
                        1,
                        format!(
                            "http://{}:{}/search?q={}&page={}",
                            config.binding_ip_addr, config.port, query, 1
                        ),
                    ),
                    None => (
                        1,
                        format!(
                            "http://{}:{}{}&page={}",
                            config.binding_ip_addr, config.port, req.uri(), 1
                        ),
                    ),
                };

                // Fetch the cached results JSON for this page_url, if any.
                let cached_results_json = redis_cache.clone().cached_results_json(page_url.clone());
                // Check whether the results were found in the cache; on a cache
                // miss, aggregate fresh results, cache them, and render those instead.
                match cached_results_json {
                    Ok(results_json) => {
                        let new_results_json: SearchResults = serde_json::from_str(&results_json)?;
                        let page_content: String = hbs.render("search", &new_results_json)?;
                        Ok(HttpResponse::Ok().body(page_content))
                    }
                    Err(_) => {
                        let mut results_json: SearchResults = aggregate(query, page).await?;
                        results_json.add_style(config.style.clone());
                        redis_cache.clone().cache_results(
                            serde_json::to_string(&results_json)?,
                            page_url.clone(),
                        )?;
                        let page_content: String = hbs.render("search", &results_json)?;
                        Ok(HttpResponse::Ok().body(page_content))
                    }
                }
            }
        }
        None => Ok(HttpResponse::Found()
            .insert_header(("location", "/"))
            .finish()),
    }
}

/// Handles the route of robots.txt page of the `websurfx` meta search engine website.
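///
/// # Example
///
/// Assuming the same default address as the `search` examples above:
///
/// ```bash
/// curl "http://127.0.0.1:8080/robots.txt"
/// ```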
#[get("/robots.txt")]
pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = read_to_string("./public/robots.txt")?;
    Ok(HttpResponse::Ok()
        .content_type("text/plain; charset=ascii")
        .body(page_content))
}

/// Handles the route of about page of the `websurfx` meta search engine website.
#[get("/about")]
pub async fn about(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("about", &config.style)?;
    Ok(HttpResponse::Ok().body(page_content))
}

/// Handles the route of settings page of the `websurfx` meta search engine website.
#[get("/settings")]
pub async fn settings(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("settings", &config.style)?;
    Ok(HttpResponse::Ok().body(page_content))
}
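
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sketch of how `web::Query` parses `SearchParams`, using the
    // same `from_query` API that the `search` handler calls above; the query
    // strings here are illustrative values, not fixtures from the project.
    #[test]
    fn deserializes_search_params_from_query_string() {
        let params = web::Query::<SearchParams>::from_query("q=rust&page=2").unwrap();
        assert_eq!(params.q.as_deref(), Some("rust"));
        assert_eq!(params.page, Some(2));

        // `page` is optional and stays `None` when absent from the query.
        let params = web::Query::<SearchParams>::from_query("q=rust").unwrap();
        assert_eq!(params.page, None);
    }
}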