//! This module handles the search route of the search engine website.
use crate::{
    cache::cacher::SharedCache,
    config::parser::Config,
    handler::paths::{file_path, FileType},
    models::{
        aggregation_models::SearchResults,
        engine_models::EngineHandler,
        server_models::{Cookie, SearchParams},
    },
    results::aggregator::aggregate,
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use regex::Regex;
use std::{
    fs::File,
    io::{BufRead, BufReader, Read},
};
use tokio::join;

/// Handles the route of any other accessed route/page which is not provided by the
/// website, essentially the 404 error page.
pub async fn not_found(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("404", &config.style)?;

    Ok(HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(page_content))
}

/// Handles the route of the search page of the `websurfx` meta search engine website and it takes
/// two search URL parameters `q` and `page` where the `page` parameter is optional.
///
/// # Example
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden&page=1"
/// ```
///
/// Or
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden"
/// ```
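///
/// The handler also reads an optional `safesearch` URL parameter (see the
/// `safe_search` computation below, which accepts levels `0..=2` from the client):
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden&page=1&safesearch=2"
/// ```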
#[get("/search")]
pub async fn search(
    hbs: web::Data<Handlebars<'_>>,
    req: HttpRequest,
    config: web::Data<Config>,
    cache: web::Data<SharedCache>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let params = web::Query::<SearchParams>::from_query(req.query_string())?;
    match &params.q {
        Some(query) => {
            if query.trim().is_empty() {
                return Ok(HttpResponse::Found()
                    .insert_header(("location", "/"))
                    .finish());
            }
            let page = match &params.page {
                Some(page) => *page,
                None => 1,
            };

            let safe_search: u8 = match config.safe_search {
                3..=4 => config.safe_search,
                _ => match &params.safesearch {
                    Some(safesearch) => match safesearch {
                        0..=2 => *safesearch,
                        _ => 1,
                    },
                    None => config.safe_search,
                },
            };
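
            // Note: levels 0..=2 are client-selectable via the `safesearch`
            // parameter; 3 and 4 can only come from the config and override
            // the client, with level 4 additionally enabling the filter-list
            // check performed in `results` below.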

            // Fetch the results for the previous, current and next page
            // concurrently; only the current page is rendered, the adjacent
            // pages are fetched so that they are already cached when the
            // user navigates to them.
            let (_, results, _) = join!(
                results(
                    format!(
                        "http://{}:{}/search?q={}&page={}&safesearch={}",
                        config.binding_ip,
                        config.port,
                        query,
                        page - 1,
                        safe_search
                    ),
                    &config,
                    &cache,
                    query,
                    page - 1,
                    req.clone(),
                    safe_search
                ),
                results(
                    format!(
                        "http://{}:{}/search?q={}&page={}&safesearch={}",
                        config.binding_ip, config.port, query, page, safe_search
                    ),
                    &config,
                    &cache,
                    query,
                    page,
                    req.clone(),
                    safe_search
                ),
                results(
                    format!(
                        "http://{}:{}/search?q={}&page={}&safesearch={}",
                        config.binding_ip,
                        config.port,
                        query,
                        page + 1,
                        safe_search
                    ),
                    &config,
                    &cache,
                    query,
                    page + 1,
                    req.clone(),
                    safe_search
                )
            );

            let page_content: String = hbs.render("search", &results?)?;
            Ok(HttpResponse::Ok().body(page_content))
        }
        None => Ok(HttpResponse::Found()
            .insert_header(("location", "/"))
            .finish()),
    }
}

/// Fetches the results for a query and page. It first checks the redis cache, and if that
/// fails it gets proper results by requesting the upstream search engines.
///
/// # Arguments
///
/// * `url` - It takes the url of the current page that requested the search results for a
/// particular search query.
/// * `config` - It takes a parsed config struct.
/// * `cache` - It takes a shared cache handle as a value.
/// * `query` - It takes the search query as a string.
/// * `page` - It takes the page number as a u32 value.
/// * `req` - It takes the `HttpRequest` struct as a value.
/// * `safe_search` - It takes the safe search level as a u8 value.
///
/// # Error
///
/// It returns the `SearchResults` struct if the search results could be successfully fetched from
/// the cache or from the upstream search engines, otherwise it returns an appropriate error.
async fn results(
    url: String,
    config: &Config,
    cache: &web::Data<SharedCache>,
    query: &str,
    page: u32,
    req: HttpRequest,
    safe_search: u8,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
    // fetch the cached results json.
    let cached_results = cache.cached_json(&url).await;
    // check whether the cached results were actually fetched or whether the
    // fetch returned an error, and handle the data accordingly.
    match cached_results {
        Ok(results) => Ok(results),
        Err(_) => {
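            // Note: the cache key is the full page URL built by the caller, so
            // each (query, page, safesearch) combination is cached independently.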
            if safe_search == 4 {
                let mut results: SearchResults = SearchResults::default();
                // A query is disallowed at this level if it matches a rule in
                // the blocklist and is not explicitly permitted by a rule in
                // the allowlist.
                let flag: bool =
                    is_match_from_filter_list(file_path(FileType::BlockList)?, query)?
                        && !is_match_from_filter_list(file_path(FileType::AllowList)?, query)?;

                if flag {
                    results.set_disallowed();
                    results.add_style(&config.style);
                    results.set_page_query(query);
                    cache.cache_results(&results, &url).await?;
                    return Ok(results);
                }
            }

            // check whether the cookie value is empty or not; if it is empty then
            // use the default upstream search engines selected in the config file,
            // otherwise parse the non-empty cookie and use the engines the user
            // selected in the UI.
            let mut results: SearchResults = match req.cookie("appCookie") {
                Some(cookie_value) => {
                    let cookie_value: Cookie<'_> =
                        serde_json::from_str(cookie_value.name_value().1)?;

                    let engines: Vec<EngineHandler> = cookie_value
                        .engines
                        .iter()
                        .filter_map(|name| EngineHandler::new(name))
                        .collect();
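
                    // For illustration: the cookie payload is a JSON document of
                    // the form `{"engines": ["duckduckgo", "searx"], ...}` (the
                    // engine names here are hypothetical); names that don't map
                    // to a known engine are silently dropped by the `filter_map`.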
                    match engines.is_empty() {
                        false => {
                            aggregate(
                                query,
                                page,
                                config.aggregator.random_delay,
                                config.debug,
                                &engines,
                                config.request_timeout,
                                safe_search,
                            )
                            .await?
                        }
                        true => {
                            let mut search_results = SearchResults::default();
                            search_results.set_no_engines_selected();
                            search_results.set_page_query(query);
                            search_results
                        }
                    }
                }
                None => {
                    aggregate(
                        query,
                        page,
                        config.aggregator.random_delay,
                        config.debug,
                        &config.upstream_search_engines,
                        config.request_timeout,
                        safe_search,
                    )
                    .await?
                }
            };
            // With engines selected but no results and no reported engine
            // errors, the results were presumably removed by the safe search
            // filtering, so mark them as filtered.
            if results.engine_errors_info().is_empty()
                && results.results().is_empty()
                && !results.no_engines_selected()
            {
                results.set_filtered();
            }
            results.add_style(&config.style);
            cache.cache_results(&results, &url).await?;
            Ok(results)
        }
    }
}

/// A helper function which checks whether the search query contains any keywords that should be
/// disallowed/allowed based on the regex-based rules present in the blocklist and allowlist files.
///
/// # Arguments
///
/// * `file_path` - It takes the file path of the list as the argument.
/// * `query` - It takes the search query to be checked against the list as an argument.
///
/// # Error
///
/// Returns a bool indicating whether the query matched an entry in the list on success,
/// otherwise returns a standard error type on a failure.
fn is_match_from_filter_list(
    file_path: &str,
    query: &str,
) -> Result<bool, Box<dyn std::error::Error>> {
    let mut flag = false;
    let mut reader = BufReader::new(File::open(file_path)?);
    // Each line of the list file is treated as an independent regex rule.
    for line in reader.by_ref().lines() {
        let re = Regex::new(&line?)?;
        if re.is_match(query) {
            flag = true;
            break;
        }
    }
    Ok(flag)
}
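
// A minimal test sketch (an addition, not part of the original module) that
// exercises the regex-per-line contract of `is_match_from_filter_list`; the
// temp file name and the rules below are hypothetical.
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;

    #[test]
    fn filter_list_matches_regex_rules() -> Result<(), Box<dyn std::error::Error>> {
        // Write a throwaway filter list with one regex rule per line.
        let path = std::env::temp_dir().join("websurfx_test_blocklist.txt");
        let mut file = File::create(&path)?;
        writeln!(file, "(?i)forbidden")?;
        writeln!(file, "^exact-term$")?;

        let path = path.to_str().ok_or("non-utf8 temp path")?;
        // A case-insensitive rule matches regardless of casing.
        assert!(is_match_from_filter_list(path, "some FORBIDDEN query")?);
        // An anchored rule matches only the exact term.
        assert!(is_match_from_filter_list(path, "exact-term")?);
        // Queries matching no rule are not flagged.
        assert!(!is_match_from_filter_list(path, "a harmless query")?);
        Ok(())
    }
}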