//! This module provides the functionality to handle different routes of the `websurfx`
//! meta search engine website and provide appropriate response to each route/page
//! when requested.
use std::fs::read_to_string;

use crate::{
    cache::cacher::RedisCache,
    config::parser::Config,
    handler::public_paths::get_public_path,
    results::{aggregation_models::SearchResults, aggregator::aggregate},
};
use actix_web::{get, web, HttpRequest, HttpResponse};
use handlebars::Handlebars;
use serde::Deserialize;
/// A named struct which deserializes all the user provided search parameters and stores them.
///
/// # Fields
///
/// * `q` - It stores the search parameter option `q` (or query in simple words)
///   of the search URL.
/// * `page` - It stores the search parameter `page` (or page number in simple words)
///   of the search URL.
#[derive(Debug, Deserialize)]
struct SearchParams {
    q: Option<String>,
    page: Option<u32>,
}
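
// A minimal sketch (not part of the original file) of how `SearchParams` is expected to
// deserialize from the query string of the search URL; the concrete values are illustrative.
#[cfg(test)]
mod search_params_tests {
    use super::SearchParams;
    use actix_web::web;

    #[test]
    fn query_and_page_deserialize_from_the_search_url() {
        // Both parameters present, e.g. `/search?q=sweden&page=1`.
        let params = web::Query::<SearchParams>::from_query("q=sweden&page=1").unwrap();
        assert_eq!(params.q.as_deref(), Some("sweden"));
        assert_eq!(params.page, Some(1));

        // The `page` parameter is optional and stays `None` when omitted.
        let params = web::Query::<SearchParams>::from_query("q=sweden").unwrap();
        assert_eq!(params.page, None);
    }
}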

/// Handles the route of the index page or main page of the `websurfx` meta search engine website.
#[get("/")]
pub async fn index(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("index", &config.style)?;

    Ok(HttpResponse::Ok().body(page_content))
}

/// Handles the route of any other accessed route/page which is not provided by the
/// website, essentially the 404 error page.
pub async fn not_found(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("404", &config.style)?;

    Ok(HttpResponse::Ok()
        .content_type("text/html; charset=utf-8")
        .body(page_content))
}

/// Handles the route of the search page of the `websurfx` meta search engine website. It takes
/// two search URL parameters, `q` and `page`, where the `page` parameter is optional.
///
/// # Example
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden&page=1"
/// ```
///
/// Or
///
/// ```bash
/// curl "http://127.0.0.1:8080/search?q=sweden"
/// ```
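///
/// An empty (or missing) `q` parameter is redirected back to the index page; a rough
/// illustration of the expected exchange (exact headers may differ):
///
/// ```bash
/// curl -i "http://127.0.0.1:8080/search?q="
/// # HTTP/1.1 302 Found
/// # location: /
/// ```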
#[get("/search")]
pub async fn search(
    hbs: web::Data<Handlebars<'_>>,
    req: HttpRequest,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let params = web::Query::<SearchParams>::from_query(req.query_string())?;

    match &params.q {
        Some(query) => {
            // An empty or whitespace-only query is redirected back to the index page.
            if query.trim().is_empty() {
                return Ok(HttpResponse::Found()
                    .insert_header(("location", "/"))
                    .finish());
            }
            // Default to the first page when no page number is supplied.
            let page = match &params.page {
                Some(page) => *page,
                None => 0,
            };
            // The full search URL (including the page number) also serves as the cache key.
            let url = format!(
                "http://{}:{}/search?q={}&page={}",
                config.binding_ip, config.port, query, page
            );
            let results_json = get_results(url, &config, query, page).await?;
            let page_content: String = hbs.render("search", &results_json)?;
            Ok(HttpResponse::Ok().body(page_content))
        }
        // A missing `q` parameter is likewise redirected back to the index page.
        None => Ok(HttpResponse::Found()
            .insert_header(("location", "/"))
            .finish()),
    }
}

/// Fetches the results for a query and page. It first checks the Redis cache; on a cache miss
/// it aggregates fresh results, caches them and returns them.
async fn get_results(
    url: String,
    config: &Config,
    query: &str,
    page: u32,
) -> Result<SearchResults, Box<dyn std::error::Error>> {
    // Initialize the redis cache connection struct.
    let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
    // Fetch the cached results json, keyed by the search URL.
    let cached_results_json = redis_cache.get_cached_json(&url);
    // Check whether the results were actually found in the cache and handle the data accordingly.
    match cached_results_json {
        Ok(results_json) => Ok(serde_json::from_str::<SearchResults>(&results_json)?),
        Err(_) => {
            // On a cache miss, aggregate fresh results, attach the configured style and cache
            // the serialized results under the search URL before returning them.
            let mut results_json: SearchResults =
                aggregate(query, page, config.aggregator.random_delay, config.debug).await?;
            results_json.add_style(config.style.clone());
            redis_cache.cache_results(serde_json::to_string(&results_json)?, &url)?;
            Ok(results_json)
        }
    }
}

/// Handles the route of the robots.txt page of the `websurfx` meta search engine website.
#[get("/robots.txt")]
pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = read_to_string(format!("{}/robots.txt", get_public_path()?))?;

    Ok(HttpResponse::Ok()
        .content_type("text/plain; charset=ascii")
        .body(page_content))
}

/// Handles the route of the about page of the `websurfx` meta search engine website.
#[get("/about")]
pub async fn about(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("about", &config.style)?;

    Ok(HttpResponse::Ok().body(page_content))
}

/// Handles the route of the settings page of the `websurfx` meta search engine website.
#[get("/settings")]
pub async fn settings(
    hbs: web::Data<Handlebars<'_>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Box<dyn std::error::Error>> {
    let page_content: String = hbs.render("settings", &config.style)?;

    Ok(HttpResponse::Ok().body(page_content))
}
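
// A minimal sketch (assumed, not part of the original file) of how the handlers in this module
// could be registered on an actix-web `App`. In the real application this wiring lives elsewhere
// (e.g. in the crate's run/main function); the helper below only illustrates the shape of the
// registration and deliberately takes already-built `Handlebars` and `Config` data so it does not
// have to guess how those are constructed. Intended usage would be something like
// `App::new().configure(|cfg| register_routes(cfg, hbs.clone(), config.clone()))`.
#[allow(dead_code)]
fn register_routes(
    cfg: &mut web::ServiceConfig,
    hbs: web::Data<Handlebars<'static>>,
    config: web::Data<Config>,
) {
    cfg.app_data(hbs)
        .app_data(config)
        .service(index)
        .service(search)
        .service(robots_data)
        .service(about)
        .service(settings)
        // Every route that is not matched above is answered by the 404 handler.
        .default_service(web::route().to(not_found));
}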