Create separate search_result_parser

This commit is contained in:
Zsombor Gegesy 2023-09-24 13:54:08 +02:00
parent 769d870803
commit 75a77d25f0
8 changed files with 110 additions and 62 deletions

View file

@ -5,7 +5,7 @@
use std::collections::HashMap;
use reqwest::header::HeaderMap;
use scraper::{Html, Selector};
use scraper::Html;
use crate::models::aggregation_models::SearchResult;
@ -13,9 +13,27 @@ use crate::models::engine_models::{EngineError, SearchEngine};
use error_stack::{Report, Result, ResultExt};
use super::search_result_parser::SearchResultParser;
/// A new DuckDuckGo engine type defined in order to implement the `SearchEngine` trait, which
/// reduces code duplication and makes it easy to create a vector of different search engines.
pub struct DuckDuckGo;
pub struct DuckDuckGo {
parser: SearchResultParser,
}
impl DuckDuckGo {
    /// Creates a new `DuckDuckGo` engine with its CSS selectors pre-compiled.
    ///
    /// # Errors
    ///
    /// Returns an `EngineError` if any of the CSS selectors fails to compile.
    pub fn new() -> Result<Self, EngineError> {
        let parser = SearchResultParser::new(
            ".no-results",
            ".result",
            ".result__a",
            ".result__url",
            ".result__snippet",
        )?;
        Ok(Self { parser })
    }
}
#[async_trait::async_trait]
impl SearchEngine for DuckDuckGo {
@ -59,34 +77,17 @@ impl SearchEngine for DuckDuckGo {
&DuckDuckGo::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
);
let no_result: Selector = Selector::parse(".no-results")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".no-results"))?;
if document.select(&no_result).next().is_some() {
if document.select(&self.parser.no_result).next().is_some() {
return Err(Report::new(EngineError::EmptyResultSet));
}
let results: Selector = Selector::parse(".result")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result"))?;
let result_title: Selector = Selector::parse(".result__a")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__a"))?;
let result_url: Selector = Selector::parse(".result__url")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__url"))?;
let result_desc: Selector = Selector::parse(".result__snippet")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result__snippet"))?;
// scrape all the results from the html
Ok(document
.select(&results)
.select(&self.parser.results)
.map(|result| {
SearchResult::new(
result
.select(&result_title)
.select(&self.parser.result_title)
.next()
.unwrap()
.inner_html()
@ -94,7 +95,7 @@ impl SearchEngine for DuckDuckGo {
format!(
"https://{}",
result
.select(&result_url)
.select(&self.parser.result_url)
.next()
.unwrap()
.inner_html()
@ -102,7 +103,7 @@ impl SearchEngine for DuckDuckGo {
)
.as_str(),
result
.select(&result_desc)
.select(&self.parser.result_desc)
.next()
.unwrap()
.inner_html()

View file

@ -4,4 +4,5 @@
//! code. Moreover, it also provides a custom error for the upstream search engine handling code.
pub mod duckduckgo;
pub mod search_result_parser;
pub mod searx;

View file

@ -0,0 +1,38 @@
use crate::models::engine_models::EngineError;
use error_stack::{Report, Result, ResultExt};
use scraper::{Html, Selector};
/// Holds the pre-compiled CSS selectors an upstream engine scraper needs to
/// pick search results out of a fetched HTML document.
pub struct SearchResultParser {
/// Matches the element shown when the engine returned no results.
pub no_result: Selector,
/// Matches one search-result container element.
pub results: Selector,
/// Matches the result's title/link element inside a container.
pub result_title: Selector,
/// Matches the result's URL element inside a container.
pub result_url: Selector,
/// Matches the result's description/snippet element inside a container.
pub result_desc: Selector,
}
impl SearchResultParser {
    /// Compiles the five CSS selector strings into a ready-to-use parser.
    ///
    /// # Errors
    ///
    /// Returns `EngineError::UnexpectedError` if any selector string is not
    /// valid CSS, with the offending selector attached to the report.
    pub fn new(
        no_result_selector: &str,
        results_selector: &str,
        result_title_selector: &str,
        result_url_selector: &str,
        result_desc_selector: &str,
    ) -> Result<SearchResultParser, EngineError> {
        let no_result = new_selector(no_result_selector)?;
        let results = new_selector(results_selector)?;
        let result_title = new_selector(result_title_selector)?;
        let result_url = new_selector(result_url_selector)?;
        let result_desc = new_selector(result_desc_selector)?;
        Ok(SearchResultParser {
            no_result,
            results,
            result_title,
            result_url,
            result_desc,
        })
    }
}
/// Parses `selector` into a `Selector`, converting a parse failure into an
/// `EngineError::UnexpectedError` report carrying the bad selector text.
fn new_selector(selector: &str) -> Result<Selector, EngineError> {
    match Selector::parse(selector) {
        Ok(parsed) => Ok(parsed),
        Err(err) => Err(Report::new(EngineError::UnexpectedError).attach_printable(format!(
            "invalid CSS selector: {}, err: {:?}",
            selector, err
        ))),
    }
}

View file

@ -3,16 +3,34 @@
//! number if provided.
use reqwest::header::HeaderMap;
use scraper::{Html, Selector};
use scraper::Html;
use std::collections::HashMap;
use super::search_result_parser::SearchResultParser;
use crate::models::aggregation_models::SearchResult;
use crate::models::engine_models::{EngineError, SearchEngine};
use error_stack::{Report, Result, ResultExt};
/// A new Searx engine type defined in order to implement the `SearchEngine` trait, which
/// reduces code duplication and makes it easy to create a vector of different search engines.
pub struct Searx;
pub struct Searx {
parser: SearchResultParser,
}
impl Searx {
    /// Creates a new `Searx` engine with its CSS selectors pre-compiled.
    ///
    /// # Errors
    ///
    /// Returns an `EngineError` if any of the CSS selectors fails to compile.
    // Consistency fix: return `Result<Self, _>` like `DuckDuckGo::new`, and
    // replace the misspelled `// new Searchx engine` comment with proper docs.
    pub fn new() -> Result<Self, EngineError> {
        Ok(Self {
            parser: SearchResultParser::new(
                "#urls>.dialog-error>p",
                ".result",
                "h3>a",
                "h3>a",
                ".content",
            )?,
        })
    }
}
#[async_trait::async_trait]
impl SearchEngine for Searx {
@ -52,13 +70,7 @@ impl SearchEngine for Searx {
&Searx::fetch_html_from_upstream(self, &url, header_map, request_timeout).await?,
);
let no_result: Selector = Selector::parse("#urls>.dialog-error>p")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| {
format!("invalid CSS selector: {}", "#urls>.dialog-error>p")
})?;
if let Some(no_result_msg) = document.select(&no_result).nth(1) {
if let Some(no_result_msg) = document.select(&self.parser.no_result).nth(1) {
if no_result_msg.inner_html()
== "we didn't find any results. Please use another query or search in more categories"
{
@ -66,40 +78,26 @@ impl SearchEngine for Searx {
}
}
let results: Selector = Selector::parse(".result")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".result"))?;
let result_title: Selector = Selector::parse("h3>a")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", "h3>a"))?;
let result_url: Selector = Selector::parse("h3>a")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", "h3>a"))?;
let result_desc: Selector = Selector::parse(".content")
.map_err(|_| Report::new(EngineError::UnexpectedError))
.attach_printable_lazy(|| format!("invalid CSS selector: {}", ".content"))?;
// scrape all the results from the html
Ok(document
.select(&results)
.select(&self.parser.results)
.map(|result| {
SearchResult::new(
result
.select(&result_title)
.select(&self.parser.result_title)
.next()
.unwrap()
.inner_html()
.trim(),
result
.select(&result_url)
.select(&self.parser.result_url)
.next()
.unwrap()
.value()
.attr("href")
.unwrap(),
result
.select(&result_desc)
.select(&self.parser.result_desc)
.next()
.unwrap()
.inner_html()