浏览代码

:wrench: Added client connection keep alive option

ddotthomas 1 年之前
父节点
当前提交
a7e0906c0b
共有 4 个文件被更改,包括 14 次插入和 5 次删除
  1. 5 2
      src/config/parser.rs
  2. 5 1
      src/lib.rs
  3. 1 1
      src/results/aggregator.rs
  4. 3 1
      websurfx/config.lua

+ 5 - 2
src/config/parser.rs

@@ -37,13 +37,15 @@ pub struct Config {
     pub request_timeout: u8,
     /// It stores the number of threads which controls the app will use to run.
     pub threads: u8,
+    /// Set the keep-alive time for client connections to the HTTP server
+    pub client_connection_keep_alive: u8,
     /// It stores configuration options for the ratelimiting middleware.
     pub rate_limiter: RateLimiter,
     /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
     /// It stores the TCP connection keepalive duration in seconds.
-    pub tcp_connection_keepalive: u8,
+    pub tcp_connection_keep_alive: u8,
     /// It stores the pool idle connection timeout in seconds.
     pub pool_idle_connection_timeout: u8,
 }
@@ -135,9 +137,10 @@ impl Config {
             upstream_search_engines: globals
                 .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
             request_timeout: globals.get::<_, u8>("request_timeout")?,
-            tcp_connection_keepalive: globals.get::<_, u8>("tcp_connection_keepalive")?,
+            tcp_connection_keep_alive: globals.get::<_, u8>("tcp_connection_keep_alive")?,
             pool_idle_connection_timeout: globals.get::<_, u8>("pool_idle_connection_timeout")?,
             threads,
+            client_connection_keep_alive: globals.get::<_, u8>("client_connection_keep_alive")?,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
                 time_limit: rate_limiter["time_limit"],

+ 5 - 1
src/lib.rs

@@ -14,7 +14,7 @@ pub mod results;
 pub mod server;
 pub mod templates;
 
-use std::{net::TcpListener, sync::OnceLock};
+use std::{net::TcpListener, sync::OnceLock, time::Duration};
 
 use crate::server::router;
 
@@ -113,6 +113,10 @@ pub fn run(
             .default_service(web::route().to(router::not_found)) // error page
     })
     .workers(config.threads as usize)
+    // Set the keep-alive timer for client connections
+    .keep_alive(Duration::from_secs(
+        config.client_connection_keep_alive as u64,
+    ))
     // Start server on 127.0.0.1 with the user provided port number. for example 127.0.0.1:8080.
     .listen(listener)?
     .run();

+ 1 - 1
src/results/aggregator.rs

@@ -81,7 +81,7 @@ pub async fn aggregate(
             .pool_idle_timeout(Duration::from_secs(
                 config.pool_idle_connection_timeout as u64,
             ))
-            .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
+            .tcp_keepalive(Duration::from_secs(config.tcp_connection_keep_alive as u64))
             .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .https_only(true)
             .gzip(true)

+ 3 - 1
websurfx/config.lua

@@ -10,7 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
-tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
+tcp_connection_keep_alive = 30 -- the amount of time the TCP connection to the upstream search engines should remain alive (i.e. stay connected). (value in seconds)
 pool_idle_connection_timeout = 30 -- timeout for the idle connections in the reqwest HTTP connection pool (value in seconds).
 rate_limiter = {
 	number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
@@ -18,6 +18,8 @@ rate_limiter = {
 }
 -- Set whether the server will use an adaptive/dynamic HTTPS window size, see https://httpwg.org/specs/rfc9113.html#fc-principles
 https_adaptive_window_size = false
+-- Set keep-alive timer in seconds; keeps clients connected to the HTTP server, different from the connection to upstream search engines
+client_connection_keep_alive = 120
 
 -- ### Search ###
 -- Filter results based on different levels. The levels provided are: