(feat): added the max idle pool per host option

KekmaTime, 10 months ago
parent
commit b7d0ef7252

+ 3 - 3
src/cache/cacher.rs

@@ -546,7 +546,7 @@ impl SharedCache {
     /// # Arguments
     ///
     /// * `url` - It takes the search url as an argument which will be used as the key to fetch the
-    /// cached results from the cache.
+    ///   cached results from the cache.
     ///
     /// # Error
     ///
@@ -563,9 +563,9 @@ impl SharedCache {
     /// # Arguments
     ///
     /// * `search_results` - It takes the `SearchResults` as an argument which are results that
-    /// needs to be cached.
+    ///   needs to be cached.
     /// * `url` - It takes the search url as an argument which will be used as the key for storing
-    /// results in the cache.
+    ///   results in the cache.
     ///
     /// # Error
     ///
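Most hunks in this commit, like the one above, only indent doc-comment continuation lines by two extra spaces. A likely reason (an assumption; the commit message does not state it) is that Markdown otherwise treats the unindented line as a lazy paragraph continuation outside the bullet, which newer Clippy releases flag as `clippy::doc_lazy_continuation`. A minimal sketch of the pattern:

```rust
// Continuation lines of a doc-comment bullet are indented two spaces so that
// rustdoc's Markdown keeps them inside the preceding list item.
/// * `url` - It takes the search url as an argument which will be used as the
///   key to fetch the cached results from the cache.
fn fetch_cached(url: &str) {
    let _ = url; // placeholder body; only the doc comment above matters here
}
```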

+ 4 - 1
src/config/parser.rs

@@ -48,6 +48,8 @@ pub struct Config {
     pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
+    /// It stores the number of https connections to keep in the pool.
+    pub number_of_https_connections: u8,
 }
 
 impl Config {
@@ -57,7 +59,7 @@ impl Config {
     /// # Arguments
     ///
     /// * `logging_initialized` - It takes a boolean which ensures that the logging doesn't get
-    /// initialized twice. Pass false if the logger has not yet been initialized.
+    ///   initialized twice. Pass false if the logger has not yet been initialized.
     ///
     /// # Error
     ///
@@ -139,6 +141,7 @@ impl Config {
             request_timeout: globals.get::<_, u8>("request_timeout")?,
             tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
+            number_of_https_connections: globals.get::<_, u8>("number_of_https_connections")?,
             threads,
             client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
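The new `number_of_https_connections` field is read from the Lua globals with the same `get::<_, u8>` pattern as the neighbouring options, so existing config files need a matching entry or parsing fails. A minimal sketch of that lookup in isolation, assuming an mlua-style `Lua` state; the inlined chunk and the value `10` are illustrative only:

```rust
use mlua::Lua;

fn main() -> mlua::Result<()> {
    let lua = Lua::new();
    // The real config is loaded from the project's Lua config file; a single
    // option is inlined here to keep the sketch self-contained.
    lua.load("number_of_https_connections = 10").exec()?;

    let globals = lua.globals();
    // Same lookup shape as the parser above: fetch the global as a `u8`.
    let number_of_https_connections: u8 = globals.get("number_of_https_connections")?;
    assert_eq!(number_of_https_connections, 10);
    Ok(())
}
```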

+ 8 - 6
src/models/aggregation_models.rs

@@ -11,7 +11,9 @@ use thesaurus::synonyms;
 /// A named struct to store the raw scraped search results scraped search results from the
 /// upstream search engines before aggregating it.It derives the Clone trait which is needed
 /// to write idiomatic rust using `Iterators`.
-/// (href url in html in simple words).
+///
+///   (href url in html in simple words).
+///
 #[derive(Clone, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchResult {
@@ -34,7 +36,7 @@ impl SearchResult {
     ///
     /// * `title` - The title of the search result.
     /// * `url` - The url which is accessed when clicked on it
-    /// (href url in html in simple words).
+    ///   (href url in html in simple words).
     /// * `description` - The description of the search result.
     /// * `engine` - The names of the upstream engines from which this results were provided.
     pub fn new(title: &str, url: &str, description: &str, engine: &[&str]) -> Self {
@@ -124,7 +126,7 @@ impl EngineErrorInfo {
     /// # Arguments
     ///
     /// * `error` - It takes the error type which occured while fetching the result from a particular
-    /// search engine.
+    ///   search engine.
     /// * `engine` - It takes the name of the engine that failed to provide the requested search results.
     pub fn new(error: &EngineError, engine: &str) -> Self {
         Self {
@@ -177,11 +179,11 @@ impl SearchResults {
     /// # Arguments
     ///
     /// * `results` - Takes an argument of individual serializable `SearchResult` struct
-    /// and stores it into a vector of `SearchResult` structs.
+    ///   and stores it into a vector of `SearchResult` structs.
     /// * `page_query` - Takes an argument of current page`s search query `q` provided in
-    /// the search url.
+    ///   the search url.
     /// * `engine_errors_info` - Takes an array of structs which contains information regarding
-    /// which engines failed with their names, reason and their severity color name.
+    ///   which engines failed with their names, reason and their severity color name.
     pub fn new(results: Box<[SearchResult]>, engine_errors_info: Box<[EngineErrorInfo]>) -> Self {
         Self {
             results,

+ 1 - 1
src/models/parser_models.rs

@@ -29,7 +29,7 @@ impl Style {
     ///
     /// * `theme` - It takes the parsed theme option used to set a theme for the website.
     /// * `colorscheme` - It takes the parsed colorscheme option used to set a colorscheme
-    /// for the theme being used.
+    ///   for the theme being used.
     pub fn new(theme: String, colorscheme: String, animation: Option<String>) -> Self {
         Style {
             theme,

+ 2 - 1
src/results/aggregator.rs

@@ -60,7 +60,7 @@ type FutureVec =
 /// * `debug` - Accepts a boolean value to enable or disable debug mode option.
 /// * `upstream_search_engines` - Accepts a vector of search engine names which was selected by the
 /// * `request_timeout` - Accepts a time (secs) as a value which controls the server request timeout.
-/// user through the UI or the config file.
+///   user through the UI or the config file.
 ///
 /// # Error
 ///
@@ -81,6 +81,7 @@ pub async fn aggregate(
                 config.pool_idle_connection_timeout as u64,
             ))
             .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
+            .pool_max_idle_per_host(config.number_of_https_connections as usize)
             .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .https_only(true)
             .gzip(true)
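`pool_max_idle_per_host` caps how many idle keep-alive connections the client retains in its pool per upstream host, so `number_of_https_connections` bounds idle connections per search engine rather than the pool as a whole. A minimal sketch of the builder in isolation (tokio runtime assumed; the hard-coded durations and the value `10` stand in for the config fields above, and `gzip` is omitted to avoid optional crate features):

```rust
use std::time::Duration;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::builder()
        .pool_idle_timeout(Duration::from_secs(30))
        .tcp_keepalive(Duration::from_secs(30))
        // At most 10 idle connections are kept alive per upstream host.
        .pool_max_idle_per_host(10)
        .connect_timeout(Duration::from_secs(30))
        .https_only(true)
        .build()?;

    let status = client.get("https://example.com").send().await?.status();
    println!("upstream responded with {status}");
    Ok(())
}
```

Setting the value to `0` keeps no idle connections at all, effectively disabling keep-alive reuse for the upstream requests.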

+ 1 - 1
src/server/routes/search.rs

@@ -146,7 +146,7 @@ pub async fn search(
 /// # Arguments
 ///
 /// * `url` - It takes the url of the current page that requested the search results for a
-/// particular search query.
+///   particular search query.
 /// * `config` - It takes a parsed config struct.
 /// * `query` - It takes the page number as u32 value.
 /// * `req` - It takes the `HttpRequest` struct as a value.

+ 1 - 1
src/templates/partials/search_bar.rs

@@ -12,7 +12,7 @@ const SAFE_SEARCH_LEVELS_NAME: [&str; 3] = ["None", "Low", "Moderate"];
 /// # Arguments
 ///
 /// * `engine_errors_info` - It takes the engine errors list containing errors for each upstream
-/// search engine which failed to provide results as an argument.
+///   search engine which failed to provide results as an argument.
 /// * `safe_search_level` - It takes the safe search level with values from 0-2 as an argument.
 /// * `query` - It takes the current search query provided by user as an argument.
 ///

+ 1 - 1
src/templates/partials/settings_tabs/engines.rs

@@ -9,7 +9,7 @@ use maud::{html, Markup};
 /// # Arguments
 ///
 /// * `engine_names` - It takes the key value pair list of all available engine names and there corresponding
-/// selected (enabled/disabled) value as an argument.
+///   selected (enabled/disabled) value as an argument.
 ///
 /// # Returns
 ///

+ 2 - 2
src/templates/partials/settings_tabs/user_interface.rs

@@ -11,9 +11,9 @@ use std::fs::read_dir;
 /// # Arguments
 ///
 /// * `style_type` - It takes the style type of the values `theme` and `colorscheme` as an
-/// argument.
+///   argument.
 /// * `selected_style` - It takes the currently selected style value provided via the config file
-/// as an argument.
+///   as an argument.
 ///
 /// # Error
 ///