//! The `searx` module handles the scraping of results from the searx search engine instance
//! by querying the upstream searx search engine instance with the user-provided query and,
//! if provided, the page number.

use rand::Rng;
use reqwest::header::{HeaderMap, CONTENT_TYPE, REFERER, USER_AGENT};
use scraper::{Html, Selector};
use std::{collections::HashMap, time::Duration};

use crate::search_results_handler::aggregation_models::RawSearchResult;

/// This function scrapes results from the upstream searx instance and puts all the scraped
/// results like title, visiting_url (href in html), engine (which engine it was fetched from)
/// and description in a RawSearchResult, then adds each one to a HashMap whose keys are urls
/// and values are RawSearchResult structs, and then returns it within a Result enum.
///
/// # Arguments
///
/// * `query` - Takes the user-provided query to query the upstream search engine with.
/// * `page` - Takes the page number as a u32 argument.
/// * `user_agent` - Takes a random user agent string as an argument.
///
/// # Errors
///
/// Returns a reqwest error if the user is not connected to the internet or if there is a
/// failure to reach the above `upstream search engine` page, and also returns an error if
/// a scraping selector fails to initialize.
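///
/// # Example
///
/// A minimal usage sketch (it assumes a tokio runtime, that `results` is in scope, and that
/// `RawSearchResult` exposes a `title` field matching the first argument of
/// `RawSearchResult::new`; the user agent value is purely illustrative):
///
/// ```ignore
/// let user_agent = "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0";
/// let scraped = results("rust programming", 1, user_agent).await?;
/// for (url, result) in &scraped {
///     println!("{url} => {}", result.title);
/// }
/// ```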
pub async fn results(
    query: &str,
page: u32,
user_agent: &str,
) -> Result<HashMap<String, RawSearchResult>, Box<dyn std::error::Error>> {
    // Page number arrives as a u32 (missing or empty values are handled by the caller), so it
    // can be interpolated directly and the upstream server receives a valid page number.
let url: String = format!("https://searx.work/search?q={query}&pageno={page}");
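    // `query` is interpolated verbatim; a hardened version might percent-encode it first,
    // e.g. with the `urlencoding` crate (an assumed, unconfirmed dependency):
    //     let url = format!("https://searx.work/search?q={}&pageno={}",
    //                       urlencoding::encode(query), page);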

    // Add a random delay before making the request.
let mut rng = rand::thread_rng();
    let delay_secs = rng.gen_range(1..10); // rand 0.8+ takes a range, not two arguments
    std::thread::sleep(Duration::from_secs(delay_secs));
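    // std::thread::sleep blocks the executor thread inside this async fn; assuming the tokio
    // runtime that reqwest's async client already requires, a non-blocking sketch would be:
    //     tokio::time::sleep(Duration::from_secs(delay_secs)).await;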

    // initializing headers and adding appropriate headers.
    let mut header_map = HeaderMap::new();
    header_map.insert(USER_AGENT, user_agent.parse()?);
    header_map.insert(REFERER, "https://google.com/".parse()?);
    header_map.insert(CONTENT_TYPE, "application/x-www-form-urlencoded".parse()?);

    // fetch the html from the upstream searx instance
    // TODO: Write better error handling code to handle the no-results case.
    let results: String = reqwest::Client::new()
        .get(url)
        .headers(header_map) // add spoofed headers to emulate human behaviour.
        .send()
        .await?
        .text()
        .await?;

let document: Html = Html::parse_document(&results);
    let results: Selector = Selector::parse(".result")?;
    let result_title: Selector = Selector::parse("h3>a")?;
    let result_url: Selector = Selector::parse("h3>a")?; // the same anchor carries both title and href
    let result_desc: Selector = Selector::parse(".content")?;

// scrape all the results from the html
Ok(document
        .select(&results)
        .map(|result| {
            RawSearchResult::new(
                result
                    .select(&result_title)
                    .next()
                    .unwrap()
                    .inner_html()
                    .trim()
                    .to_string(),
                result
                    .select(&result_url)
                    .next()
                    .unwrap()
                    .value()
                    .attr("href")
                    .unwrap()
                    .to_string(),
                result
                    .select(&result_desc)
                    .next()
                    .unwrap()
                    .inner_html()
                    .trim()
                    .to_string(),
                vec!["searx".to_string()],
            )
        })
        .map(|search_result| (search_result.visiting_url.clone(), search_result))
        .collect())
}
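
// A minimal smoke-test sketch: it performs a live network request against the public
// searx.work instance, so it is ignored by default; `#[tokio::test]` assumes tokio is
// available as a dev-dependency, and the user agent value is illustrative.
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    #[ignore = "performs a live network request"]
    async fn scrapes_at_least_one_result() {
        let user_agent =
            "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0";
        let scraped = results("rust", 1, user_agent).await.expect("request failed");
        assert!(!scraped.is_empty());
    }
}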