2023-04-27 17:53:28 +03:00
|
|
|
//! This module provides the functionality to scrape and gather all the results from the upstream
|
|
|
|
//! search engines and then removes duplicate results.
|
|
|
|
|
2023-05-07 21:18:19 +03:00
|
|
|
use std::{collections::HashMap, time::Duration};
|
|
|
|
|
|
|
|
use rand::Rng;
|
|
|
|
use tokio::join;
|
2023-04-22 14:35:07 +03:00
|
|
|
|
2023-04-25 16:30:04 +03:00
|
|
|
use super::{
|
|
|
|
aggregation_models::{RawSearchResult, SearchResult, SearchResults},
|
|
|
|
user_agent::random_user_agent,
|
|
|
|
};
|
2023-04-22 14:35:07 +03:00
|
|
|
|
|
|
|
use crate::engines::{duckduckgo, searx};
|
|
|
|
|
2023-04-27 17:53:28 +03:00
|
|
|
/// A function that aggregates all the scraped results from the above upstream engines and
|
|
|
|
/// then removes duplicate results and if two results are found to be from two or more engines
|
|
|
|
/// then puts their names together to show the results are fetched from these upstream engines
|
|
|
|
/// and then removes all data from the HashMap and puts into a struct of all results aggregated
|
2023-05-07 21:18:19 +03:00
|
|
|
/// into a vector and also adds the query used into the struct this is neccessory because
|
2023-04-27 17:53:28 +03:00
|
|
|
/// otherwise the search bar in search remains empty if searched from the query url
|
|
|
|
///
|
|
|
|
/// # Example:
|
|
|
|
///
|
|
|
|
/// If you search from the url like `https://127.0.0.1/search?q=huston` then the search bar should
|
|
|
|
/// contain the word huston and not remain empty.
|
2023-05-07 21:18:19 +03:00
|
|
|
///
|
2023-04-27 17:53:28 +03:00
|
|
|
/// # Arguments
|
|
|
|
///
|
|
|
|
/// * `query` - Accepts a string to query with the above upstream search engines.
|
2023-05-02 11:58:21 +03:00
|
|
|
/// * `page` - Accepts an u32 page number.
|
2023-04-27 17:53:28 +03:00
|
|
|
///
|
|
|
|
/// # Error
|
|
|
|
///
|
2023-05-07 21:18:19 +03:00
|
|
|
/// Returns an error a reqwest and scraping selector errors if any error occurs in the results
|
2023-04-27 17:53:28 +03:00
|
|
|
/// function in either `searx` or `duckduckgo` or both otherwise returns a `SearchResults struct`
|
|
|
|
/// containing appropriate values.
|
2023-04-22 14:35:07 +03:00
|
|
|
pub async fn aggregate(
|
|
|
|
query: &str,
|
2023-05-02 11:58:21 +03:00
|
|
|
page: u32,
|
2023-04-22 14:35:07 +03:00
|
|
|
) -> Result<SearchResults, Box<dyn std::error::Error>> {
|
2023-04-25 16:30:04 +03:00
|
|
|
let user_agent: String = random_user_agent();
|
2023-04-22 14:35:07 +03:00
|
|
|
let mut result_map: HashMap<String, RawSearchResult> = HashMap::new();
|
|
|
|
|
2023-05-07 21:18:19 +03:00
|
|
|
// Add a random delay before making the request.
|
|
|
|
let mut rng = rand::thread_rng();
|
|
|
|
let delay_secs = rng.gen_range(1, 10);
|
|
|
|
std::thread::sleep(Duration::from_secs(delay_secs));
|
|
|
|
|
|
|
|
// fetch results from upstream search engines simultaneously/concurrently.
|
|
|
|
let (ddg_map_results, searx_map_results) = join!(
|
|
|
|
duckduckgo::results(query, page, &user_agent),
|
|
|
|
searx::results(query, page, &user_agent)
|
|
|
|
);
|
|
|
|
|
|
|
|
let ddg_map_results: HashMap<String, RawSearchResult> = ddg_map_results?;
|
|
|
|
let searx_map_results: HashMap<String, RawSearchResult> = searx_map_results?;
|
2023-04-22 14:35:07 +03:00
|
|
|
|
|
|
|
result_map.extend(ddg_map_results);
|
|
|
|
|
2023-04-25 16:30:04 +03:00
|
|
|
searx_map_results.into_iter().for_each(|(key, value)| {
|
|
|
|
result_map
|
|
|
|
.entry(key)
|
|
|
|
.and_modify(|result| {
|
2023-04-26 17:46:49 +03:00
|
|
|
result.add_engines(value.clone().engine());
|
2023-04-25 16:30:04 +03:00
|
|
|
})
|
|
|
|
.or_insert_with(|| -> RawSearchResult {
|
|
|
|
RawSearchResult::new(
|
|
|
|
value.title.clone(),
|
|
|
|
value.visiting_url.clone(),
|
|
|
|
value.description.clone(),
|
|
|
|
value.engine.clone(),
|
|
|
|
)
|
|
|
|
});
|
|
|
|
});
|
2023-04-22 14:35:07 +03:00
|
|
|
|
2023-04-25 16:30:04 +03:00
|
|
|
Ok(SearchResults::new(
|
|
|
|
result_map
|
|
|
|
.into_iter()
|
|
|
|
.map(|(key, value)| {
|
|
|
|
SearchResult::new(
|
|
|
|
value.title,
|
|
|
|
value.visiting_url,
|
|
|
|
key,
|
|
|
|
value.description,
|
|
|
|
value.engine,
|
|
|
|
)
|
|
|
|
})
|
|
|
|
.collect(),
|
|
|
|
query.to_string(),
|
|
|
|
))
|
2023-04-22 14:35:07 +03:00
|
|
|
}
|