diff --git a/src/lib.rs b/src/lib.rs
index fe8ee92..cd83d8a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -17,7 +17,7 @@ use actix_files as fs;
 use actix_web::{dev::Server, http::header, middleware::Logger, web, App, HttpServer};
 use config::parser::Config;
 use handlebars::Handlebars;
-use handler::public_paths::public_path;
+use handler::paths::{file_path, FileType};
 
 /// Runs the web server on the provided TCP listener and returns a `Server` instance.
 ///
@@ -42,7 +42,7 @@
 pub fn run(listener: TcpListener, config: Config) -> std::io::Result<Server> {
     let mut handlebars: Handlebars = Handlebars::new();
 
-    let public_folder_path: String = public_path()?;
+    let public_folder_path: String = file_path(FileType::Theme)?;
 
     handlebars
         .register_templates_directory(".html", format!("{}/templates", public_folder_path))
diff --git a/src/server/routes.rs b/src/server/routes.rs
index 77210b2..93c6fd5 100644
--- a/src/server/routes.rs
+++ b/src/server/routes.rs
@@ -8,7 +8,7 @@ use crate::{
     cache::cacher::RedisCache,
     config::parser::Config,
     engines::engine_models::EngineHandler,
-    handler::public_paths::public_path,
+    handler::paths::{file_path, FileType},
     results::{aggregation_models::SearchResults, aggregator::aggregate},
 };
 use actix_web::{get, web, HttpRequest, HttpResponse};
@@ -215,7 +215,7 @@ async fn results(
 /// Handles the route of robots.txt page of the `websurfx` meta search engine website.
 #[get("/robots.txt")]
 pub async fn robots_data(_req: HttpRequest) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let page_content: String = read_to_string(format!("{}/robots.txt", public_path()?))?;
+    let page_content: String = read_to_string(format!("{}/robots.txt", file_path(FileType::Theme)?))?;
     Ok(HttpResponse::Ok()
         .content_type("text/plain; charset=ascii")
         .body(page_content))
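
Note: this refactor replaces the single-purpose `public_path()` helper with a generalized `file_path(FileType)` lookup, so one function can resolve the on-disk location of different kinds of files. The diff itself only shows the call sites; the sketch below is a minimal illustration of how such an API could be shaped. The `Config` variant, the candidate directories, and the error message are assumptions for illustration, not taken from this diff; the only facts used are that `file_path(FileType::Theme)?` is called from a function returning `std::io::Result<Server>`, so it must yield a `String` with an `std::io::Error` on failure.

// Hypothetical sketch of the generalized path API assumed by this diff.
// Only `FileType::Theme` appears in the changes above; the other variant
// and the candidate directories are illustrative assumptions.
use std::io::{Error, ErrorKind};
use std::path::Path;

/// Kinds of files the server needs to locate on disk.
pub enum FileType {
    /// Theme assets: templates, stylesheets, robots.txt, etc.
    Theme,
    /// The user's configuration file (assumed variant).
    Config,
}

/// Returns the first existing directory for the requested file type,
/// or an `io::Error` if none of the candidate locations exist.
pub fn file_path(file_type: FileType) -> Result<String, Error> {
    // Candidate locations, checked in order (assumed, for illustration).
    let candidates: &[&str] = match file_type {
        FileType::Theme => &["./public", "/opt/websurfx/public"],
        FileType::Config => &["./websurfx", "/etc/xdg/websurfx"],
    };

    for dir in candidates {
        if Path::new(dir).exists() {
            return Ok(dir.to_string());
        }
    }

    Err(Error::new(
        ErrorKind::NotFound,
        "no candidate directory found for the requested file type",
    ))
}

Because the returned error is an `std::io::Error`, the `?` operator at both call sites (in `run` and in the `robots_data` route) propagates a missing-directory failure without any extra conversion.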