parent 7fd9246ee4
commit a799af3125
6 changed files with 158 additions and 59 deletions
@@ -6,7 +6,7 @@
 use std::net::TcpListener;
 use websurfx::{config::parser::Config, run};
 
 /// A dhat heap memory profiler
 #[cfg(feature = "dhat-heap")]
 #[global_allocator]
 static ALLOC: dhat::Alloc = dhat::Alloc;
@@ -22,7 +22,7 @@ async fn main() -> std::io::Result<()> {
     // A dhat heap profiler initialization.
     #[cfg(feature = "dhat-heap")]
     let _profiler = dhat::Profiler::new_heap();
 
     // Initialize the parsed config file.
     let config = Config::parse(false).unwrap();
@@ -42,7 +42,7 @@ use handler::paths::{file_path, FileType};
 pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
     let mut handlebars: Handlebars = Handlebars::new();
 
-    let public_folder_path: String = file_path(FileType::Theme)?;
+    let public_folder_path: &str = file_path(FileType::Theme)?;
 
     handlebars
         .register_templates_directory(".html", format!("{}/templates", public_folder_path))
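The `String` → `&str` switches in this commit (here for `file_path`, below for `random_user_agent`) trade a fresh allocation on every call for a long-lived borrow. A minimal sketch of that pattern, assuming the producer memoizes its value with `std::sync::OnceLock` (the generation logic below is hypothetical, not the project's actual implementation):

use std::sync::OnceLock;

// Hypothetical stand-in for a value that used to be rebuilt (and cloned)
// on every call. OnceLock initializes it exactly once; afterwards every
// caller receives a cheap `&'static str` borrow instead of an owned String.
static USER_AGENT: OnceLock<String> = OnceLock::new();

pub fn random_user_agent() -> &'static str {
    USER_AGENT.get_or_init(|| {
        // real generation logic elided; any one-time construction works here
        String::from("Mozilla/5.0 (compatible; websurfx)")
    })
}

Because the borrow lives for the whole program, call sites such as the `filter_with_lists(...)` arguments further down no longer need an extra `&` in front of `file_path(...)`.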
@@ -71,7 +71,7 @@ pub async fn aggregate(
     upstream_search_engines: Vec<EngineHandler>,
     request_timeout: u8,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
-    let user_agent: String = random_user_agent();
+    let user_agent: &str = random_user_agent();
 
     // Add a random delay before making the request.
     if random_delay || !debug {
@@ -89,10 +89,9 @@ pub async fn aggregate(
         let (name, search_engine) = engine_handler.into_name_engine();
         names.push(name);
         let query: String = query.clone();
-        let user_agent: String = user_agent.clone();
         tasks.push(tokio::spawn(async move {
             search_engine
-                .results(query, page, user_agent.clone(), request_timeout)
+                .results(query, page, user_agent.to_owned(), request_timeout)
                 .await
         }));
     }
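Dropping the per-task `let user_agent: String = user_agent.clone();` works because a `&'static str` is `Copy`: it can be moved into each spawned future directly, and only the engine call that genuinely needs an owned `String` pays for an allocation via `.to_owned()`. A self-contained sketch of that pattern (`UA` and `takes_owned` are illustrative names, not the project's API):

// Illustrative only: `UA` stands in for the `&'static str` returned by
// random_user_agent(), and `takes_owned` for an engine method that
// still requires an owned String.
static UA: &str = "example-agent/1.0";

async fn takes_owned(_user_agent: String) {}

#[tokio::main]
async fn main() {
    let mut tasks = Vec::new();
    for _ in 0..3 {
        // UA is Copy, so no clone is needed to move it into the task.
        tasks.push(tokio::spawn(async move {
            takes_owned(UA.to_owned()).await;
        }));
    }
    for task in tasks {
        task.await.unwrap();
    }
}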
@@ -155,13 +154,13 @@ pub async fn aggregate(
     filter_with_lists(
         &mut result_map,
         &mut blacklist_map,
-        &file_path(FileType::BlockList)?,
+        file_path(FileType::BlockList)?,
     )?;
 
     filter_with_lists(
         &mut blacklist_map,
         &mut result_map,
-        &file_path(FileType::AllowList)?,
+        file_path(FileType::AllowList)?,
     )?;
 
     drop(blacklist_map);
@@ -159,9 +159,9 @@ async fn results(
     req: HttpRequest,
 ) -> Result<SearchResults, Box<dyn std::error::Error>> {
     //Initialize redis cache connection struct
-    let mut redis_cache = RedisCache::new(config.redis_url.clone())?;
+    let mut redis_cache = RedisCache::new(&config.redis_url, 5).await?;
     // fetch the cached results json.
-    let cached_results_json = redis_cache.cached_json(&url);
+    let cached_results_json = redis_cache.cached_json(&url).await;
     // check if fetched cache results was indeed fetched or it was an error and if so
     // handle the data accordingly.
     match cached_results_json {
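The cache layer turns fully async in this commit: the constructor now borrows the connection URL instead of cloning it and takes a second argument that, from this call site, looks like a pool size (5), while `cached_json` and `cache_results` are awaited. A hypothetical sketch of the API shape these call sites imply (the real struct, fields, and error handling live in the project's cache module):

// Hypothetical shape inferred from the call sites in this diff.
pub struct RedisCache {
    // e.g. a round-robin pool of multiplexed redis connections
}

impl RedisCache {
    /// Borrows the URL and opens `pool_size` connections up front,
    /// which is why construction is now async and fallible.
    pub async fn new(
        redis_connection_url: &str,
        pool_size: u8,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        let _ = (redis_connection_url, pool_size); // connection setup elided
        Ok(Self {})
    }

    /// Async lookup of the cached JSON for a request URL.
    pub async fn cached_json(
        &mut self,
        _url: &str,
    ) -> Result<String, Box<dyn std::error::Error>> {
        Err("cache miss (stub)".into())
    }

    /// Stores serialized results; taking `&str` lets the caller keep
    /// ownership of the serialized String.
    pub async fn cache_results(
        &mut self,
        _json_results: &str,
        _url: &str,
    ) -> Result<(), Box<dyn std::error::Error>> {
        Ok(())
    }
}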
@@ -206,7 +206,9 @@ async fn results(
             }
         };
         results.add_style(config.style.clone());
-        redis_cache.cache_results(serde_json::to_string(&results)?, &url)?;
+        redis_cache
+            .cache_results(&serde_json::to_string(&results)?, &url)
+            .await?;
         Ok(results)
     }
 }