alamin655 committed
Commit 94046cb
Parents: 5761bca 098ae8e

Merge branch 'rolling' into check

.gitpod.yml CHANGED
@@ -1,22 +1,23 @@
 ---
-image: ubuntu:latest
+image: gitpod/workspace-base
 # Commands that will run on workspace start
 tasks:
   - name: Setup, Install & Build
-    - before: apt install cargo redis-server nodejs npm && cargo test
-    - init: cargo install cargo-watch
-    - command: redis-server --port 8080 & cargo watch -q -w "." -x "run"
+    before: apt install cargo redis-server nodejs npm -y && cargo test
+    init: cargo install cargo-watch
+    command: redis-server --port 8080 & cargo watch -q -w "." -x "run"
 # Ports to expose on workspace startup
 ports:
   - name: Website
     description: Website Preview
     port: 8080
-    onOpen: open-browser
+    onOpen: open-preview
 # vscode IDE setup
 vscode:
   extensions:
     - vadimcn.vscode-lldb
-    - rust-lang.rust-analyzer
+    - cschleiden.vscode-github-actions
+    - rust-lang.rust
     - bungcip.better-toml
     - serayuzgur.crates
     - usernamehw.errorlens
@@ -27,10 +28,8 @@ vscode:
     - evgeniypeshkov.syntax-highlighter
     - redhat.vscode-yaml
     - ms-azuretools.vscode-docker
-    - GitHub.vscode-github-actions
     - Catppuccin.catppuccin-vsc
     - PKief.material-icon-theme
-    - tal7aouy.rainbow-bracket
     - oderwat.indent-rainbow
     - formulahendry.auto-rename-tag
     - eamodio.gitlens
src/cache/cacher.rs CHANGED
@@ -41,7 +41,7 @@ impl RedisCache {
     /// # Arguments
     ///
     /// * `url` - It takes an url as a string.
-    pub fn get_cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
+    pub fn cached_json(&mut self, url: &str) -> Result<String, Box<dyn std::error::Error>> {
         let hashed_url_string = Self::hash_url(url);
         Ok(self.connection.get(hashed_url_string)?)
     }
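
The rename drops the `get_` prefix, following the common Rust convention that getters are named after what they return. A minimal call-site sketch, assuming the `RedisCache::new` constructor and `redis_url` field shown in the routes.rs hunk below; the match arms are illustrative only:

    // Sketch only: how a caller reads the cache after the rename.
    let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
    match redis_cache.cached_json(&url) {
        Ok(json) => { /* serve the cached JSON */ }
        Err(_) => { /* fall back to fetching fresh results */ }
    }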
src/config/parser.rs CHANGED
@@ -50,7 +50,7 @@ impl Config {
         let globals = context.globals();

         context
-            .load(&fs::read_to_string(Config::get_config_path()?)?)
+            .load(&fs::read_to_string(Config::config_path()?)?)
             .exec()?;

         Ok(Config {
@@ -81,7 +81,7 @@ impl Config {
     /// one (3).
     /// 3. `websurfx/` (under project folder ( or codebase in other words)) if it is not present
     /// here then it returns an error as mentioned above.
-    fn get_config_path() -> Result<String, Box<dyn std::error::Error>> {
+    fn config_path() -> Result<String, Box<dyn std::error::Error>> {
         // check user config

         let path = format!(
src/handler/public_paths.rs CHANGED
@@ -17,7 +17,7 @@ static PUBLIC_DIRECTORY_NAME: &str = "public";
 /// 1. `/opt/websurfx` if it not present here then it fallbacks to the next one (2)
 /// 2. Under project folder ( or codebase in other words) if it is not present
 /// here then it returns an error as mentioned above.
-pub fn get_public_path() -> Result<String, Error> {
+pub fn public_path() -> Result<String, Error> {
     if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
         return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
    }
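
The hunk shows only the first lookup; a rough sketch of the full fallback shape the doc comment describes, where the project-folder branch and the error value are assumptions rather than code from this commit (`Error` is taken here to be `std::io::Error`):

    // Sketch of the two-step lookup implied by the doc comment; the second
    // branch and the error message are assumed, not taken from the diff.
    pub fn public_path() -> Result<String, Error> {
        if Path::new(format!("/opt/websurfx/{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
            return Ok(format!("/opt/websurfx/{}", PUBLIC_DIRECTORY_NAME));
        }
        if Path::new(format!("./{}/", PUBLIC_DIRECTORY_NAME).as_str()).exists() {
            return Ok(format!("./{}", PUBLIC_DIRECTORY_NAME));
        }
        Err(Error::new(
            std::io::ErrorKind::NotFound,
            "public folder not found",
        ))
    }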
src/lib.rs CHANGED
@@ -16,7 +16,7 @@ use actix_files as fs;
 use actix_web::{dev::Server, middleware::Logger, web, App, HttpServer};
 use config::parser::Config;
 use handlebars::Handlebars;
-use handler::public_paths::get_public_path;
+use handler::public_paths::public_path;

 /// Runs the web server on the provided TCP listener and returns a `Server` instance.
 ///
@@ -41,7 +41,7 @@ use handler::public_paths::get_public_path;
 pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
     let mut handlebars: Handlebars = Handlebars::new();

-    let public_folder_path: String = get_public_path()?;
+    let public_folder_path: String = public_path()?;

     handlebars
         .register_templates_directory(".html", format!("{}/templates", public_folder_path))
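
`run()` keeps its public signature, so downstream callers are untouched by these renames. A minimal usage sketch, assuming the library crate is named `websurfx` and that `Config` exposes the `binding_ip` and `port` fields used in routes.rs; the constructor name below is hypothetical:

    // Hypothetical binary entry point; only run(listener, config) and the
    // Config fields are taken from this commit, the rest is assumed.
    use std::net::TcpListener;
    use websurfx::{config::parser::Config, run};

    #[actix_web::main]
    async fn main() -> std::io::Result<()> {
        let config = Config::parse().expect("failed to load the config file"); // assumed constructor
        let listener = TcpListener::bind(format!("{}:{}", config.binding_ip, config.port))?;
        run(listener, config)?.await
    }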
src/server/routes.rs CHANGED
@@ -7,7 +7,7 @@ use std::fs::read_to_string;
 use crate::{
     cache::cacher::RedisCache,
     config::parser::Config,
-    handler::public_paths::get_public_path,
+    handler::public_paths::public_path,
     results::{aggregation_models::SearchResults, aggregator::aggregate},
 };
 use actix_web::{get, web, HttpRequest, HttpResponse};
@@ -89,7 +89,7 @@ pub async fn search(
         "http://{}:{}/search?q={}&page={}",
         config.binding_ip, config.port, query, page
     );
-    let results_json = get_results(url, &config, query, page).await?;
+    let results_json = results(url, &config, query, page).await?;
     let page_content: String = hbs.render("search", &results_json)?;
     Ok(HttpResponse::Ok().body(page_content))
 }
@@ -101,7 +101,7 @@ pub async fn search(

 /// Fetches the results for a query and page.
 /// First checks the redis cache, if that fails it gets proper results
-async fn get_results(
+async fn results(
     url: String,
     config: &Config,
     query: &str,
@@ -110,7 +110,7 @@ async fn get_results(
     //Initialize redis cache connection struct
     let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
     // fetch the cached results json.
-    let cached_results_json = redis_cache.get_cached_json(&url);
+    let cached_results_json = redis_cache.cached_json(&url);
     // check if fetched results was indeed fetched or it was an error and if so
     // handle the data accordingly.
     match cached_results_json {
@@ -128,7 +128,7 @@ async fn get_results(
 /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
 #[get("/robots.txt")]
 pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = read_to_string(format!("{}/robots.txt", get_public_path()?))?;
+    let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?;
     Ok(HttpResponse::Ok()
         .content_type("text/plain; charset=ascii")
         .body(page_content))
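
The renamed `results()` keeps the cache-first flow its doc comment describes: try Redis, and only aggregate fresh results on a miss. A rough sketch of that branch under assumptions; the `aggregate` arguments, the cache-write method name, and the serde derives on `SearchResults` are not shown in this commit:

    // Illustrative only: a cache hit deserializes the stored JSON, a cache
    // miss aggregates fresh results and stores them before returning.
    match cached_results_json {
        Ok(json) => Ok(serde_json::from_str::<SearchResults>(&json)?),
        Err(_) => {
            let fresh_results = aggregate(query, page).await?; // assumed arguments
            redis_cache.cache_results(serde_json::to_string(&fresh_results)?, &url)?; // assumed method
            Ok(fresh_results)
        }
    }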