Compare commits
No commits in common. "rolling" and "v1.21.1" have entirely different histories.
19 changed files with 580 additions and 1539 deletions
Cargo.lock (generated, 1610 changed lines)
File diff suppressed because it is too large.
Cargo.toml (27 changed lines)
@@ -1,6 +1,6 @@
 [package]
 name = "websurfx"
-version = "1.21.0"
+version = "1.18.0"
 edition = "2021"
 description = "An open-source alternative to Searx that provides clean, ad-free, and organic results with incredible speed while keeping privacy and security in mind."
 repository = "https://github.com/neon-mmd/websurfx"
@@ -20,14 +20,14 @@ reqwest = { version = "0.12.5", default-features = false, features = [
     "http2",
     "socks",
 ] }
-tokio = { version = "1.43.0", features = [
+tokio = { version = "1.41.0", features = [
     "rt-multi-thread",
     "macros",
     "fs",
     "io-util",
 ], default-features = false }
 serde = { version = "1.0.215", default-features = false, features = ["derive"] }
-serde_json = { version = "1.0.134", default-features = false }
+serde_json = { version = "1.0.122", default-features = false }
 bincode = {version="1.3.3", default-features=false}
 maud = { version = "0.26.0", default-features = false, features = [
     "actix-web",
@@ -41,19 +41,19 @@ actix-web = { version = "4.9.0", features = [
 actix-files = { version = "0.6.5", default-features = false }
 actix-cors = { version = "0.7.0", default-features = false }
 fake-useragent = { version = "0.1.3", default-features = false }
-env_logger = { version = "0.11.6", default-features = false }
+env_logger = { version = "0.11.1", default-features = false }
 log = { version = "0.4.21", default-features = false }
-mlua = { version = "0.10.2", features = [
+mlua = { version = "0.9.9", features = [
     "luajit",
     "vendored",
 ], default-features = false }
-redis = { version = "0.28.1", features = [
+redis = { version = "0.27.5", features = [
     "tokio-comp",
     "connection-manager",
     "tcp_nodelay"
 ], default-features = false, optional = true }
-blake3 = { version = "1.5.4", default-features = false }
-error-stack = { version = "0.4.0", default-features = false, features = [
+blake3 = { version = "1.5.5", default-features = false }
+error-stack = { version = "0.5.0", default-features = false, features = [
     "std",
 ] }
 async-trait = { version = "0.1.80", default-features = false }
@@ -61,8 +61,8 @@ regex = { version = "1.11.1", features = ["perf"], default-features = false }
 futures = { version = "0.3.31", default-features = false, features = ["alloc"] }
 dhat = { version = "0.3.2", optional = true, default-features = false }
 mimalloc = { version = "0.1.43", default-features = false }
-async-once-cell = { version = "0.5.4", default-features = false }
-actix-governor = { version = "0.8.0", default-features = false }
+async-once-cell = { version = "0.5.3", default-features = false }
+actix-governor = { version = "0.7.0", default-features = false }
 moka = { version = "0.12.8", optional = true, default-features = false, features = [
     "future",
 ] }
@@ -87,12 +87,7 @@ stop-words = { version = "0.8.0", default-features = false, features = ["iso"] }
 thesaurus = { version = "0.5.2", default-features = false, optional = true, features = [
     "moby",
 ]}
-
-actix-multipart = { version = "0.7.2", default-features = false, features = [
-    "derive",
-    "tempfile",
-]}
-itertools = {version = "0.14.0", default-features = false}
+itertools = {version = "0.13.0", default-features = false}

 [dev-dependencies]
 rusty-hook = { version = "^0.11.2", default-features = false }
@@ -5,8 +5,7 @@
   font-weight: 200 600;
   font-stretch: 0% 200%;
   font-display: swap;
-  src: url('https://fonts.gstatic.com/s/rubik/v28/iJWKBXyIfDnIV7nErXyi0A.woff2')
-    format('woff2');
+  src: url("https://fonts.gstatic.com/s/rubik/v28/iJWKBXyIfDnIV7nErXyi0A.woff2") format('woff2');
 }

 * {
@@ -98,7 +97,7 @@ button {
   outline-offset: 3px;
   outline: 2px solid transparent;
   border: none;
-  transition: 0.1s;
+  transition: .1s;
   gap: 0;
   background-color: var(--color-six);
   color: var(--background-color);
@@ -108,10 +107,10 @@ button {
 }

 .search_bar button img {
-  position: absolute;
-  left: 50%;
-  top: 50%;
-  transform: translate(-50%, -50%);
+  position:absolute;
+  left:50%;
+  top:50%;
+  transform:translate(-50%, -50%);
 }

 .search_bar button:active {
@@ -254,6 +253,7 @@ button {

 /* styles for the footer and header */

+
 header {
   width: 100%;
   background: var(--background-color);
@@ -341,7 +341,7 @@ footer div {
 .results_aggregated {
   display: flex;
   flex-direction: column;
-  justify-content: space-between;
+  justify-content: space-between;
   margin: 2rem 0;
   content-visibility: auto;
 }
@@ -714,8 +714,7 @@ footer div {
 }

 .settings_container .user_interface select,
-.settings_container .general select,
-.settings_container .general form input {
+.settings_container .general select {
   margin: 0.7rem 0;
   width: 20rem;
   background-color: var(--color-one);
@@ -727,38 +726,6 @@ footer div {
   text-transform: capitalize;
 }

-.settings_container .general form input {
-  padding: 0;
-  width: 30rem;
-  text-align: center;
-  text-transform: none;
-}
-
-.settings_container .general form input::file-selector-button {
-  content: 'Browse';
-  padding: 1rem 2rem;
-  font-size: 1.5rem;
-  background: var(--color-three);
-  color: var(--background-color);
-  border-radius: 0.5rem;
-  border: 2px solid transparent;
-  font-weight: bold;
-  transition: all 0.1s ease-out;
-  cursor: pointer;
-  box-shadow: 5px 5px;
-  outline: none;
-  translate: -1rem 0;
-}
-
-.settings_container .general form input::file-selector-button:active {
-  box-shadow: none;
-  translate: 5px 5px;
-}
-
-.settings_container .general .export_btn {
-  margin-bottom: 1rem;
-}
-
 .settings_container .user_interface option:hover,
 .settings_container .general option:hover {
   background-color: var(--color-one);
@@ -831,7 +798,7 @@ footer div {
   left: 0.4rem;
   bottom: 0.4rem;
   background-color: var(--background-color);
-  transition: 0.2s;
+  transition: .2s;
 }

 input:checked + .slider {
@@ -855,7 +822,7 @@ input:checked + .slider::before {
   border-radius: 50%;
 }

-@media screen and (width <=1136px) {
+@media screen and (width <= 1136px) {
   .hero-text-container {
     width: unset;
   }
@@ -865,7 +832,7 @@ input:checked + .slider::before {
   }
 }

-@media screen and (width <=706px) {
+@media screen and (width <= 706px) {
   .about-container article .logo-container svg {
     width: clamp(200px, 290px, 815px);
   }
@@ -889,7 +856,7 @@ input:checked + .slider::before {
   .features {
     grid-template-columns: 1fr;
   }
-
+
   .feature-list {
     padding: 35px 0;
   }
src/cache/cacher.rs (vendored, 2 changed lines)
@@ -213,10 +213,12 @@ pub trait Cacher: Send + Sync {
     }

     /// A helper function that compresses or encrypts search results before they're inserted into a cache store
+    ///
     /// # Arguments
     ///
     /// * `search_results` - A reference to the search_Results to process.
     ///
+    ///
     /// # Error
     /// Returns a Vec of compressed or encrypted bytes on success otherwise it returns a CacheError
@@ -78,11 +78,11 @@ impl Config {
         lua.load(&fs::read_to_string(file_path(FileType::Config)?)?)
             .exec()?;

-        let parsed_threads: u8 = globals.get("threads")?;
+        let parsed_threads: u8 = globals.get::<_, u8>("threads")?;

-        let debug: bool = globals.get("debug")?;
-        let logging: bool = globals.get("logging")?;
-        let adaptive_window: bool = globals.get("adaptive_window")?;
+        let debug: bool = globals.get::<_, bool>("debug")?;
+        let logging: bool = globals.get::<_, bool>("logging")?;
+        let adaptive_window: bool = globals.get::<_, bool>("adaptive_window")?;

         if !logging_initialized {
             set_logging_level(debug, logging);
@@ -99,9 +99,9 @@ impl Config {
             parsed_threads
         };

-        let rate_limiter: HashMap<String, u8> = globals.get("rate_limiter")?;
+        let rate_limiter = globals.get::<_, HashMap<String, u8>>("rate_limiter")?;

-        let parsed_safe_search: u8 = globals.get::<_>("safe_search")?;
+        let parsed_safe_search: u8 = globals.get::<_, u8>("safe_search")?;
         let safe_search: u8 = match parsed_safe_search {
             0..=4 => parsed_safe_search,
             _ => {
@@ -112,7 +112,7 @@ impl Config {
         };

         #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
-        let parsed_cet = globals.get::<_>("cache_expiry_time")?;
+        let parsed_cet = globals.get::<_, u16>("cache_expiry_time")?;
         #[cfg(any(feature = "redis-cache", feature = "memory-cache"))]
         let cache_expiry_time = match parsed_cet {
             0..=59 => {
@@ -125,7 +125,7 @@ impl Config {
             _ => parsed_cet,
         };

-        let proxy_opt: Option<String> = globals.get::<_>("proxy")?;
+        let proxy_opt = globals.get::<_, Option<String>>("proxy")?;
         let proxy = proxy_opt.and_then(|proxy_str| {
             Proxy::all(proxy_str).ok().and_then(|_| {
                 log::error!("Invalid proxy url, defaulting to no proxy.");
@@ -135,29 +135,30 @@ impl Config {

         Ok(Config {
             operating_system_tls_certificates: globals
-                .get::<_>("operating_system_tls_certificates")?,
-            port: globals.get::<_>("port")?,
-            binding_ip: globals.get::<_>("binding_ip")?,
+                .get::<_, bool>("operating_system_tls_certificates")?,
+            port: globals.get::<_, u16>("port")?,
+            binding_ip: globals.get::<_, String>("binding_ip")?,
             style: Style::new(
-                globals.get::<_>("theme")?,
-                globals.get::<_>("colorscheme")?,
-                globals.get::<_>("animation")?,
+                globals.get::<_, String>("theme")?,
+                globals.get::<_, String>("colorscheme")?,
+                globals.get::<_, Option<String>>("animation")?,
             ),
             #[cfg(feature = "redis-cache")]
-            redis_url: globals.get::<_>("redis_url")?,
+            redis_url: globals.get::<_, String>("redis_url")?,
             aggregator: AggregatorConfig {
-                random_delay: globals.get::<_>("production_use")?,
+                random_delay: globals.get::<_, bool>("production_use")?,
             },
             logging,
             debug,
             adaptive_window,
-            upstream_search_engines: globals.get::<_>("upstream_search_engines")?,
-            request_timeout: globals.get::<_>("request_timeout")?,
-            tcp_connection_keep_alive: globals.get::<_>("tcp_connection_keep_alive")?,
-            pool_idle_connection_timeout: globals.get::<_>("pool_idle_connection_timeout")?,
-            number_of_https_connections: globals.get::<_>("number_of_https_connections")?,
+            upstream_search_engines: globals
+                .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
+            request_timeout: globals.get::<_, u8>("request_timeout")?,
+            tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
+            pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
+            number_of_https_connections: globals.get::<_, u8>("number_of_https_connections")?,
             threads,
-            client_connection_keep_alive: globals.get::<_>("client_connection_keep_alive")?,
+            client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
                 time_limit: rate_limiter["time_limit"],
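The parser hunks above are the mechanical fallout of the Cargo.toml pin moving from mlua 0.10.2 back to 0.9.9: mlua 0.9's Table::get takes two type parameters (key and value), while 0.10 infers the key and takes only the value, so every `globals.get::<_>(...)` becomes `globals.get::<_, T>(...)`. A minimal standalone sketch of the 0.9-style calls (not repo code; the inline Lua chunk is made up for illustration):

use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();
    // Stand-in for loading websurfx/config.lua from disk.
    lua.load("threads = 8; debug = false").exec()?;
    let globals = lua.globals();
    // mlua 0.9: the turbofish names the key type (inferred) and the value type.
    let threads: u8 = globals.get::<_, u8>("threads")?;
    let debug: bool = globals.get::<_, bool>("debug")?;
    println!("threads={threads}, debug={debug}");
    Ok(())
}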
@@ -15,7 +15,6 @@ use crate::models::engine_models::{EngineError, SearchEngine};

 use error_stack::{Report, Result, ResultExt};

-use super::common::build_cookie;
 use super::search_result_parser::SearchResultParser;

 /// A new Bing engine type defined in-order to implement the `SearchEngine` trait which allows to
@@ -74,7 +73,10 @@ impl SearchEngine for Bing {
             ("_UR=QS=0&TQS", "0"),
         ];

-        let cookie_string = build_cookie(&query_params);
+        let mut cookie_string = String::new();
+        for (k, v) in &query_params {
+            cookie_string.push_str(&format!("{k}={v}; "));
+        }

         let header_map = HeaderMap::try_from(&HashMap::from([
             ("User-Agent".to_string(), user_agent.to_string()),
@@ -1,23 +0,0 @@
-//! This module provides common functionalities for engines
-
-/**
- * Build a query from a list of key value pairs.
- */
-pub fn build_query(query_params: &[(&str, &str)]) -> String {
-    let mut query_params_string = String::new();
-    for (k, v) in query_params {
-        query_params_string.push_str(&format!("&{k}={v}"));
-    }
-    query_params_string
-}
-
-/**
- * Build a cookie from a list of key value pairs.
- */
-pub fn build_cookie(cookie_params: &[(&str, &str)]) -> String {
-    let mut cookie_string = String::new();
-    for (k, v) in cookie_params {
-        cookie_string.push_str(&format!("{k}={v}; "));
-    }
-    cookie_string
-}
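The deleted module above (evidently the engines' `common` module, given the `pub mod common;` removal in the next hunk) held the two helpers that the bing and mojeek changes re-inline. A standalone demo of their behavior, with the bodies copied from the diff; note that both concatenate raw key/value pairs and do no percent-encoding, so callers must supply URL-safe values:

fn build_query(query_params: &[(&str, &str)]) -> String {
    let mut query_params_string = String::new();
    for (k, v) in query_params {
        query_params_string.push_str(&format!("&{k}={v}"));
    }
    query_params_string
}

fn build_cookie(cookie_params: &[(&str, &str)]) -> String {
    let mut cookie_string = String::new();
    for (k, v) in cookie_params {
        cookie_string.push_str(&format!("{k}={v}; "));
    }
    cookie_string
}

fn main() {
    let params = [("q", "rust"), ("page", "2")];
    assert_eq!(build_query(&params), "&q=rust&page=2");
    assert_eq!(build_cookie(&params), "q=rust; page=2; ");
}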
@@ -5,11 +5,9 @@

 pub mod bing;
 pub mod brave;
-pub mod common;
 pub mod duckduckgo;
 pub mod librex;
 pub mod mojeek;
 pub mod search_result_parser;
 pub mod searx;
 pub mod startpage;
-pub mod wikipedia;
@@ -14,7 +14,6 @@ use crate::models::engine_models::{EngineError, SearchEngine};

 use error_stack::{Report, Result, ResultExt};

-use super::common::{build_cookie, build_query};
 use super::search_result_parser::SearchResultParser;

 /// A new Mojeek engine type defined in-order to implement the `SearchEngine` trait which allows to
@@ -108,7 +107,10 @@ impl SearchEngine for Mojeek {
             ("safe", &safe),
         ];

-        let query_params_string = build_query(&query_params);
+        let mut query_params_string = String::new();
+        for (k, v) in &query_params {
+            query_params_string.push_str(&format!("&{k}={v}"));
+        }

         let url: String = match page {
             0 => {
@@ -121,7 +123,10 @@ impl SearchEngine for Mojeek {
             }
         };

-        let cookie_string = build_cookie(&query_params);
+        let mut cookie_string = String::new();
+        for (k, v) in &query_params {
+            cookie_string.push_str(&format!("{k}={v}; "));
+        }

         let header_map = HeaderMap::try_from(&HashMap::from([
             ("User-Agent".to_string(), user_agent.to_string()),
@@ -1,101 +0,0 @@
-//! The `wikipedia` module handles the scraping of results from wikipedia
-//! with user provided query and with a page number if provided.
-
-use std::collections::HashMap;
-
-use reqwest::header::HeaderMap;
-use reqwest::Client;
-use scraper::Html;
-
-use crate::models::aggregation_models::SearchResult;
-
-use crate::models::engine_models::{EngineError, SearchEngine};
-
-use error_stack::{Report, Result, ResultExt};
-
-use super::common::build_query;
-use super::search_result_parser::SearchResultParser;
-
-/// A new Wikipedia engine type defined in-order to implement the `SearchEngine` trait which allows to
-/// reduce code duplication as well as allows to create vector of different search engines easily.
-pub struct Wikipedia {
-    /// The parser, used to interpret the search result.
-    parser: SearchResultParser,
-    /// The id of the engine, equals to 'wikipedia-' + language
-    id: String,
-    /// The host where wikipedia can be accessed.
-    host: String,
-}
-
-impl Wikipedia {
-    /// Creates the Wikipedia parser.
-    pub fn new(language: &str) -> Result<Self, EngineError> {
-        let host = format!("https://{}.wikipedia.org", &language);
-        let id = format!("wikipedia-{}", &language);
-        Ok(Self {
-            parser: SearchResultParser::new(
-                "p.mw-search-nonefound",
-                ".mw-search-results li.mw-search-result",
-                ".mw-search-result-heading a",
-                ".mw-search-result-heading a",
-                ".searchresult",
-            )?,
-            id,
-            host,
-        })
-    }
-}
-
-#[async_trait::async_trait]
-impl SearchEngine for Wikipedia {
-    async fn results(
-        &self,
-        query: &str,
-        page: u32,
-        user_agent: &str,
-        client: &Client,
-        _safe_search: u8,
-    ) -> Result<Vec<(String, SearchResult)>, EngineError> {
-        let header_map = HeaderMap::try_from(&HashMap::from([
-            ("User-Agent".to_string(), user_agent.to_string()),
-            ("Referer".to_string(), self.host.to_string()),
-        ]))
-        .change_context(EngineError::UnexpectedError)?;
-
-        let offset = (page * 20).to_string();
-        let query_params: Vec<(&str, &str)> = vec![
-            ("limit", "20"),
-            ("offset", &offset),
-            ("profile", "default"),
-            ("search", query),
-            ("title", "Special:Search"),
-            ("ns0", "1"),
-        ];
-
-        let query_params_string = build_query(&query_params);
-
-        let url: String = format!("{}/w/index.php?{}", self.host, query_params_string);
-
-        let document: Html = Html::parse_document(
-            &Wikipedia::fetch_html_from_upstream(self, &url, header_map, client).await?,
-        );
-
-        if self.parser.parse_for_no_results(&document).next().is_some() {
-            return Err(Report::new(EngineError::EmptyResultSet));
-        }
-
-        // scrape all the results from the html
-        self.parser
-            .parse_for_results(&document, |title, url, desc| {
-                let found_url = url.attr("href");
-                found_url.map(|relative_url| {
-                    SearchResult::new(
-                        title.inner_html().trim(),
-                        &format!("{}{relative_url}", self.host),
-                        desc.inner_html().trim(),
-                        &[&self.id],
-                    )
-                })
-            })
-    }
-}
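The deleted engine above feeds CSS selectors to `SearchResultParser`, which sits on top of the `scraper` crate (visible from the `use scraper::Html;` import). A standalone sketch of the underlying selection step against a made-up fragment of a MediaWiki results page:

use scraper::{Html, Selector};

fn main() {
    let html = r#"
        <ul class="mw-search-results">
          <li class="mw-search-result">
            <div class="mw-search-result-heading"><a href="/wiki/Rust">Rust</a></div>
            <div class="searchresult">A systems programming language.</div>
          </li>
        </ul>"#;
    let document = Html::parse_document(html);
    let heading = Selector::parse(".mw-search-result-heading a").unwrap();
    for link in document.select(&heading) {
        // Prints the title and the relative URL the engine would join with its host.
        println!("{} -> {:?}", link.inner_html(), link.value().attr("href"));
    }
}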
@@ -110,7 +110,6 @@ pub fn run(
             .service(server::routes::search::search) // search page
             .service(router::about) // about page
             .service(router::settings) // settings page
-            .service(server::routes::export_import::download) // download page
             .default_service(web::route().to(router::not_found)) // error page
     })
     .workers(config.threads as usize)
@@ -206,10 +206,6 @@ impl EngineHandler {
                 let engine = crate::engines::bing::Bing::new()?;
                 ("bing", Box::new(engine))
             }
-            "wikipedia" => {
-                let engine = crate::engines::wikipedia::Wikipedia::new("en")?;
-                ("wikipedia", Box::new(engine))
-            }
             _ => {
                 return Err(Report::from(EngineError::NoSuchEngineFound(
                     engine_name.to_string(),
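The match above maps engine names to boxed trait objects, so dropping the Wikipedia engine is just deleting an arm. A stripped-down standalone sketch of that dispatch pattern (the trait and types here are illustrative stand-ins, not the repo's):

trait SearchEngine {
    fn name(&self) -> &'static str;
}

struct Bing;

impl SearchEngine for Bing {
    fn name(&self) -> &'static str {
        "bing"
    }
}

fn engine_from_name(name: &str) -> Result<Box<dyn SearchEngine>, String> {
    match name {
        "bing" => Ok(Box::new(Bing)),
        // After this diff, "wikipedia" falls through to the error arm.
        _ => Err(format!("no such engine: {name}")),
    }
}

fn main() {
    assert!(engine_from_name("bing").is_ok());
    assert!(engine_from_name("wikipedia").is_err());
}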
@@ -10,7 +10,7 @@
 /// order to allow the deserializing the json back to struct in aggregate function in
 /// aggregator.rs and create a new struct out of it and then serialize it back to json and pass
 /// it to the template files.
-#[derive(Default, Clone)]
+#[derive(Default)]
 pub struct Style {
     /// It stores the parsed theme option used to set a theme for the website.
     pub theme: String,
@@ -2,7 +2,7 @@
 //! engine website.
 use std::borrow::Cow;

-use serde::{Deserialize, Serialize};
+use serde::Deserialize;

 use super::parser_models::Style;
@@ -22,22 +22,16 @@ pub struct SearchParams {

 /// A named struct which is used to deserialize the cookies fetched from the client side.
 #[allow(dead_code)]
-#[derive(Deserialize, Serialize)]
+#[derive(Deserialize)]
 pub struct Cookie<'a> {
-    #[serde(borrow)]
     /// It stores the theme name used in the website.
     pub theme: Cow<'a, str>,
-    #[serde(borrow)]
     /// It stores the colorscheme name used for the website theme.
     pub colorscheme: Cow<'a, str>,
-    #[serde(borrow)]
     /// It stores the user selected upstream search engines selected from the UI.
     pub engines: Cow<'a, [Cow<'a, str>]>,
     /// It stores the user selected safe search level from the UI.
     pub safe_search_level: u8,
-    #[serde(borrow)]
-    /// It stores the animation name used for the website theme.
-    pub animation: Option<Cow<'a, str>>,
 }

 impl<'a> Cookie<'a> {
@@ -49,10 +43,6 @@ impl<'a> Cookie<'a> {
             colorscheme: Cow::Borrowed(&style.colorscheme),
             engines: Cow::Owned(engines),
             safe_search_level,
-            animation: style
-                .animation
-                .as_ref()
-                .map(|str| Cow::Borrowed(str.as_str())),
         }
     }
 }
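The dropped `#[serde(borrow)]` attributes are what let the rolling-side `Cookie` deserialize its `Cow` fields as borrows into the cookie's JSON text instead of allocating owned strings; without the attribute, serde always produces `Cow::Owned`. A standalone sketch of the difference (the struct is an illustrative stand-in, not the repo's):

use serde::Deserialize;
use std::borrow::Cow;

#[derive(Deserialize)]
struct BorrowedPrefs<'a> {
    #[serde(borrow)]
    theme: Cow<'a, str>,
}

fn main() {
    let json = String::from(r#"{"theme":"simple"}"#);
    let prefs: BorrowedPrefs = serde_json::from_str(&json).unwrap();
    // With #[serde(borrow)] (and no escape sequences in the input),
    // the Cow points into `json` rather than copying it.
    assert!(matches!(prefs.theme, Cow::Borrowed(_)));
}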
@@ -1,194 +0,0 @@
-//! This module handles the settings and download route of the search engine website.
-
-use crate::{
-    handler::{file_path, FileType},
-    models::{self, server_models},
-    Config,
-};
-use actix_multipart::form::{tempfile::TempFile, MultipartForm};
-use actix_web::{
-    cookie::{
-        time::{Duration, OffsetDateTime},
-        Cookie,
-    },
-    get, post, web, HttpRequest, HttpResponse,
-};
-use std::borrow::Cow;
-use std::io::Read;
-
-use tokio::fs::read_dir;
-
-/// A helper function that helps in building the list of all available colorscheme/theme/animation
-/// names present in the colorschemes, animations and themes folder respectively by excluding the
-/// ones that have already been selected via the config file.
-///
-/// # Arguments
-///
-/// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an
-/// argument.
-///
-/// # Error
-///
-/// Returns a list of colorscheme/theme names as a vector of tuple strings on success otherwise
-/// returns a standard error message.
-async fn style_option_list<'a>(
-    style_type: &'a str,
-) -> Result<Box<[Cow<'a, str>]>, Box<dyn std::error::Error>> {
-    let mut style_options = Vec::new();
-    let mut dir = read_dir(format!(
-        "{}static/{}/",
-        file_path(FileType::Theme)?,
-        style_type,
-    ))
-    .await?;
-    while let Some(file) = dir.next_entry().await? {
-        let style_name = file.file_name().to_str().unwrap().replace(".css", "");
-        style_options.push(Cow::Owned(style_name));
-    }
-
-    if style_type == "animations" {
-        style_options.push(Cow::default())
-    }
-
-    Ok(style_options.into_boxed_slice())
-}
-
-/// A helper function which santizes user provided json data from the input file.
-///
-/// # Arguments
-///
-/// * `config` - It takes the config struct as an argument.
-/// * `setting_value` - It takes the cookie struct as an argument.
-///
-/// # Error
-///
-/// returns a standard error message on failure otherwise it returns the unit type.
-async fn sanitize(
-    config: web::Data<&'static Config>,
-    setting_value: &mut models::server_models::Cookie<'_>,
-) -> Result<(), Box<dyn std::error::Error>> {
-    // Check whether the theme, colorscheme and animation option is valid by matching it against
-    // the available option list. If the option provided by the user via the JSON file is invalid
-    // then replace the user provided by the default one used by the server via the config file.
-
-    if !style_option_list("themes")
-        .await?
-        .contains(&setting_value.theme)
-    {
-        setting_value.theme = Cow::Borrowed(&config.style.theme)
-    } else if !style_option_list("colorschemes")
-        .await?
-        .contains(&setting_value.colorscheme)
-    {
-        setting_value.colorscheme = Cow::Borrowed(&config.style.colorscheme)
-    } else if !style_option_list("animations")
-        .await?
-        .contains(setting_value.animation.as_ref().unwrap())
-    {
-        setting_value.animation = config
-            .style
-            .animation
-            .as_ref()
-            .map(|str| Cow::Borrowed(str.as_str()));
-    }
-
-    // Filters out any engines in the list that are invalid by matching each engine against the
-    // available engine list.
-    let engines: Vec<_> = setting_value
-        .engines
-        .iter()
-        .cloned()
-        .filter_map(|engine| {
-            config
-                .upstream_search_engines
-                .keys()
-                .cloned()
-                .any(|other_engine| *engine == other_engine)
-                .then_some(engine.clone())
-        })
-        .collect();
-    setting_value.engines = Cow::Owned(engines);
-
-    setting_value.safe_search_level = match setting_value.safe_search_level {
-        0..2 => setting_value.safe_search_level,
-        _ => u8::default(),
-    };
-
-    Ok(())
-}
-
-/// A multipart struct which stores user provided input file data in memory.
-#[derive(MultipartForm)]
-struct File {
-    /// It stores the input file data in memory.
-    file: TempFile,
-}
-
-/// Handles the route of the post settings page.
-#[post("/settings")]
-pub async fn set_settings(
-    config: web::Data<&'static Config>,
-    MultipartForm(mut form): MultipartForm<File>,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    if let Some(file_name) = form.file.file_name {
-        let file_name_parts = file_name.split(".");
-        if let 2 = file_name_parts.clone().count() {
-            if let Some("json") = file_name_parts.last() {
-                if let 0 = form.file.size {
-                    return Ok(HttpResponse::BadRequest().finish());
-                } else {
-                    let mut data = String::new();
-                    form.file.file.read_to_string(&mut data).unwrap();
-
-                    let mut unsanitized_json_data: models::server_models::Cookie<'_> =
-                        serde_json::from_str(&data)?;
-
-                    sanitize(config, &mut unsanitized_json_data).await?;
-
-                    let sanitized_json_data: String =
-                        serde_json::json!(unsanitized_json_data).to_string();
-
-                    return Ok(HttpResponse::Ok()
-                        .cookie(
-                            Cookie::build("appCookie", sanitized_json_data)
-                                .expires(
-                                    OffsetDateTime::now_utc().saturating_add(Duration::weeks(52)),
-                                )
-                                .finish(),
-                        )
-                        .finish());
-                }
-            }
-        }
-    }
-    Ok(HttpResponse::Ok().finish())
-}
-
-/// Handles the route of the download page.
-#[get("/download")]
-pub async fn download(
-    config: web::Data<&'static Config>,
-    req: HttpRequest,
-) -> Result<HttpResponse, Box<dyn std::error::Error>> {
-    let cookie = req.cookie("appCookie");
-
-    // Get search settings using the user's cookie or from the server's config
-    let preferences: server_models::Cookie<'_> = cookie
-        .as_ref()
-        .and_then(|cookie_value| serde_json::from_str(cookie_value.value()).ok())
-        .unwrap_or_else(|| {
-            server_models::Cookie::build(
-                &config.style,
-                config
-                    .upstream_search_engines
-                    .iter()
-                    .filter_map(|(engine, enabled)| {
-                        enabled.then_some(Cow::Borrowed(engine.as_str()))
-                    })
-                    .collect(),
-                u8::default(),
-            )
-        });
-
-    Ok(HttpResponse::Ok().json(preferences))
-}
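One detail in the deleted `sanitize` worth flagging: `0..2` is an exclusive range pattern (stable since Rust 1.80), so only safe-search levels 0 and 1 survive and a submitted level of 2 resets to the default, even though the config parser elsewhere in this diff accepts `0..=4`. A standalone illustration:

fn clamp_level(level: u8) -> u8 {
    match level {
        0..2 => level, // exclusive: matches 0 and 1 only
        _ => u8::default(),
    }
}

fn main() {
    assert_eq!(clamp_level(1), 1);
    assert_eq!(clamp_level(2), 0); // 2 is not in 0..2, so it falls through
}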
@@ -1,4 +1,3 @@
 //! This module provides modules to handle various routes in the search engine website.

-pub mod export_import;
 pub mod search;
@@ -55,7 +55,6 @@ pub async fn search(

     // Get search settings using the user's cookie or from the server's config
     let mut search_settings: server_models::Cookie<'_> = cookie
-        .as_ref()
         .and_then(|cookie_value| serde_json::from_str(cookie_value.value()).ok())
         .unwrap_or_else(|| {
             server_models::Cookie::build(
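The pattern shown here, try to parse the cookie's JSON payload and fall back to server defaults, survives the diff apart from the removed line. A standalone sketch of the same fallback (the `Settings` type is an illustrative stand-in for `server_models::Cookie`):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Settings {
    theme: String,
}

fn load_settings(cookie_value: Option<&str>) -> Settings {
    cookie_value
        .and_then(|value| serde_json::from_str(value).ok())
        .unwrap_or_else(|| Settings { theme: "simple".into() }) // server default
}

fn main() {
    println!("{:?}", load_settings(Some(r#"{"theme":"dark"}"#)));
    println!("{:?}", load_settings(None)); // missing or malformed cookie -> defaults
}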
@@ -37,21 +37,6 @@ pub fn general(safe_search_level: u8) -> Markup {
                     option value=(SAFE_SEARCH_LEVELS[2].0){(SAFE_SEARCH_LEVELS[2].1)}
                 }
             }
-            h3{"Restore preferences from file"}
-            p class="description"{
-                "Select a json file to restore preferences for the search engine."
-            }
-            form method="post" enctype="multipart/form-data" onsubmit="setTimeout(function () { window.location.reload(); }, 10)" action="/settings" target="_self"{
-                input type="file" name="file" accept="application/json";
-                button type="submit" {"Submit"}
-            }
-            h3{"Export preferences to a file"}
-            p class="description"{
-                "Export all the settings saved as a cookie as a json file."
-            }
-            a class="export_btn" href="./download" download="settings.json" {
-                button type="submit" {"Export"}
-            }
         }
     )
 }
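The removed markup is maud syntax: elements nest with braces, attributes are written inline, and void elements end with a semicolon. A standalone sketch rendering a cut-down version of the deleted restore form (maud is in Cargo.toml on both sides; the function name here is mine):

use maud::{html, Markup};

fn restore_section() -> Markup {
    html! {
        h3 { "Restore preferences from file" }
        form method="post" enctype="multipart/form-data" action="/settings" {
            // Void element, terminated with a semicolon in maud syntax.
            input type="file" name="file" accept="application/json";
            button type="submit" { "Submit" }
        }
    }
}

fn main() {
    println!("{}", restore_section().into_string());
}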
@@ -76,7 +76,6 @@ upstream_search_engines = {
     LibreX = false,
     Mojeek = false,
     Bing = false,
-    Wikipedia = true,
 } -- select the upstream search engines from which the results should be fetched.

 proxy = nil -- Proxy to send outgoing requests through. Set to nil to disable.