diff --git a/.gitpod.Dockerfile b/.gitpod.Dockerfile new file mode 100644 index 0000000..c479341 --- /dev/null +++ b/.gitpod.Dockerfile @@ -0,0 +1,3 @@ +FROM gitpod/workspace-rust + +RUN sudo install-packages redis-server nodejs npm diff --git a/.gitpod.yml b/.gitpod.yml index 4eeabff..d1b8f3b 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,23 +1,25 @@ --- -image: gitpod/workspace-base +image: + file: .gitpod.Dockerfile + # Commands that will run on workspace start tasks: - - name: Setup, Install & Build - before: apt install cargo redis-server nodejs npm -y && cargo test - init: cargo install cargo-watch - command: redis-server --port 8080 & cargo watch -q -w "." -x "run" -# Ports to expose on workspace startup -ports: - - name: Website - description: Website Preview - port: 8080 - onOpen: open-preview + - name: Start Redis Server + command: redis-server --port 8082 + - name: Run The App + init: cargo build + command: PKG_ENV=dev ./target/release/websurfx + - name: Tests + command: cargo test + - name: Clippy Checks + command: cargo clippy + # vscode IDE setup vscode: extensions: - vadimcn.vscode-lldb - cschleiden.vscode-github-actions - - rust-lang.rust + - rust-lang.rust-analyzer - bungcip.better-toml - serayuzgur.crates - usernamehw.errorlens @@ -26,13 +28,17 @@ vscode: - stylelint.vscode-stylelint - dbaeumer.vscode-eslint - evgeniypeshkov.syntax-highlighter - - redhat.vscode-yaml - ms-azuretools.vscode-docker - Catppuccin.catppuccin-vsc - PKief.material-icon-theme - oderwat.indent-rainbow - formulahendry.auto-rename-tag + - swellaby.vscode-rust-test-adapter + - belfz.search-crates-io + - hbenl.test-adapter-converter + - hbenl.vscode-test-explorer - eamodio.gitlens + github: prebuilds: master: true @@ -40,5 +46,5 @@ github: pullRequests: true pullRequestsFromForks: true addCheck: true - addComment: false + addComment: false addBadge: true diff --git a/docs/configuration.md b/docs/configuration.md index 90b8ce8..665d939 100644 --- a/docs/configuration.md +++ 
b/docs/configuration.md @@ -23,6 +23,7 @@ Some of the configuration options provided in the file are stated below. These a - **logging:** An option to enable or disable logs. - **debug:** An option to enable or disable debug mode. +- **threads:** The amount of threads that the app will use to run (the value should be greater than 0). ## Server diff --git a/docs/installation.md b/docs/installation.md index f77f040..8f2ee2e 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -79,6 +79,7 @@ After that edit the config.lua file located under `websurfx` directory. In the c -- ### General ### logging = true -- an option to enable or disable logs. debug = false -- an option to enable or disable debug mode. +threads = 10 -- the amount of threads that the app will use to run (the value should be greater than 0). -- ### Server ### port = "8080" -- port on which server should be launched diff --git a/src/bin/websurfx.rs b/src/bin/websurfx.rs index b4b989e..6807749 100644 --- a/src/bin/websurfx.rs +++ b/src/bin/websurfx.rs @@ -24,8 +24,8 @@ async fn main() -> std::io::Result<()> { ); log::info!( "Open http://{}:{}/ in your browser", + config.binding_ip, config.port, - config.binding_ip ); let listener = TcpListener::bind((config.binding_ip.clone(), config.port))?; diff --git a/src/config/parser.rs b/src/config/parser.rs index 51a83ea..201e579 100644 --- a/src/config/parser.rs +++ b/src/config/parser.rs @@ -4,7 +4,7 @@ use super::parser_models::Style; use log::LevelFilter; use rlua::Lua; -use std::{collections::HashMap, format, fs, io::Write, path::Path, thread::available_parallelism}; +use std::{collections::HashMap, format, fs, path::Path, thread::available_parallelism}; // ------- Constants -------- static COMMON_DIRECTORY_NAME: &str = "websurfx"; @@ -79,26 +79,26 @@ impl Config { // Check whether logging has not been initialized before. if logging_initialized { - // Initializing logging middleware with level set to default or info. 
- let mut log_level: LevelFilter = LevelFilter::Off; - if logging && debug == false { - log_level = LevelFilter::Info; - } else if debug { - log_level = LevelFilter::Trace; - }; - env_logger::Builder::new().filter(None, log_level).init(); + if let Ok(pkg_env_var) = std::env::var("PKG_ENV"){ + if pkg_env_var.to_lowercase() == "dev" { + env_logger::Builder::new().filter(None, LevelFilter::Trace).init(); + } + } else { + // Initializing logging middleware with level set to default or info. + let mut log_level: LevelFilter = LevelFilter::Error; + if logging && debug == false { + log_level = LevelFilter::Info; + } else if debug { + log_level = LevelFilter::Debug; + }; + env_logger::Builder::new().filter(None, log_level).init(); + } } let threads: u8 = if parsed_threads == 0 { - let total_num_of_threads:usize = available_parallelism()?.get() /2; - if debug || logging { - log::error!("Config Error: The value of `threads` option should be a non zero positive integer"); - log::info!("Falling back to using {} threads", total_num_of_threads) - } else { - std::io::stdout() - .lock() - .write_all(&format!("Config Error: The value of `threads` option should be a non zero positive integer\nFalling back to using {} threads\n", total_num_of_threads).into_bytes())?; - }; + let total_num_of_threads: usize = available_parallelism()?.get() / 2; + log::error!("Config Error: The value of `threads` option should be a non zero positive integer"); + log::error!("Falling back to using {} threads", total_num_of_threads); total_num_of_threads as u8 } else { parsed_threads diff --git a/src/results/aggregator.rs index b8d7346..4ffaaf5 100644 --- a/src/results/aggregator.rs +++ b/src/results/aggregator.rs @@ -144,6 +144,7 @@ pub async fn aggregate( initial = false } Err(error_type) => { + log::error!("Engine Error: {:?}", error_type); engine_errors_info.push(EngineErrorInfo::new( error_type.downcast_ref::<EngineError>().unwrap(), upstream_search_engines[counter].clone(), @@ -172,6
+173,7 @@ pub async fn aggregate( counter += 1 } Err(error_type) => { + log::error!("Engine Error: {:?}", error_type); engine_errors_info.push(EngineErrorInfo::new( error_type.downcast_ref::<EngineError>().unwrap(), upstream_search_engines[counter].clone(),