diff --git a/db/db-core/src/lib.rs b/db/db-core/src/lib.rs
index 268f7f0e..98b8336a 100644
--- a/db/db-core/src/lib.rs
+++ b/db/db-core/src/lib.rs
@@ -313,8 +313,11 @@ pub trait MCDatabase: std::marker::Send + std::marker::Sync + CloneSPDatabase {
/// Get the entry at a location in the list of analytics entires under a certain time limit
/// and sorted in ascending order
- async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>>;
-
+ async fn stats_get_entry_at_location_for_time_limit_asc(
+ &self,
+ duration: u32,
+ location: u32,
+ ) -> DBResult<Option<usize>>;
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
diff --git a/db/db-core/src/tests.rs b/db/db-core/src/tests.rs
index d4a3a26a..d0204a1a 100644
--- a/db/db-core/src/tests.rs
+++ b/db/db-core/src/tests.rs
@@ -9,26 +9,26 @@ use crate::prelude::*;
/// easy traffic pattern
pub const TRAFFIC_PATTERN: TrafficPattern = TrafficPattern {
- avg_traffic: 500,
- peak_sustainable_traffic: 5_000,
- broke_my_site_traffic: Some(10_000),
- };
+ avg_traffic: 500,
+ peak_sustainable_traffic: 5_000,
+ broke_my_site_traffic: Some(10_000),
+};
/// levels for complex captcha config
-pub const LEVELS: [Level; 3] = [
- Level {
- difficulty_factor: 1,
- visitor_threshold: 1,
- },
- Level {
- difficulty_factor: 2,
- visitor_threshold: 2,
- },
- Level {
- difficulty_factor: 3,
- visitor_threshold: 3,
- },
- ];
+pub const LEVELS: [Level; 3] = [
+ Level {
+ difficulty_factor: 1,
+ visitor_threshold: 1,
+ },
+ Level {
+ difficulty_factor: 2,
+ visitor_threshold: 2,
+ },
+ Level {
+ difficulty_factor: 3,
+ visitor_threshold: 3,
+ },
+];
/// test all database functions
pub async fn database_works<'a, T: MCDatabase>(
@@ -309,12 +309,26 @@ pub async fn database_works<'a, T: MCDatabase>(
worker_type: "wasm".into(),
};
-
- assert_eq!(db.stats_get_num_logs_under_time(analytics.time).await.unwrap(), 0);
+ assert_eq!(
+ db.stats_get_num_logs_under_time(analytics.time)
+ .await
+ .unwrap(),
+ 0
+ );
db.analysis_save(c.key, &analytics).await.unwrap();
- assert_eq!(db.stats_get_num_logs_under_time(analytics.time).await.unwrap(), 1);
- assert_eq!(db.stats_get_num_logs_under_time(analytics.time - 1).await.unwrap(), 0);
+ assert_eq!(
+ db.stats_get_num_logs_under_time(analytics.time)
+ .await
+ .unwrap(),
+ 1
+ );
+ assert_eq!(
+ db.stats_get_num_logs_under_time(analytics.time - 1)
+ .await
+ .unwrap(),
+ 0
+ );
let limit = 50;
let mut offset = 0;
let a = db.analytics_fetch(c.key, limit, offset).await.unwrap();
@@ -334,37 +348,48 @@ pub async fn database_works<'a, T: MCDatabase>(
assert_eq!(db.analytics_fetch(c.key, 1000, 0).await.unwrap().len(), 0);
assert!(!db.analytics_captcha_is_published(c.key).await.unwrap());
- let rest_analytics= [
+ let rest_analytics = [
CreatePerformanceAnalytics {
- time: 2,
- difficulty_factor: 2,
- worker_type: "wasm".into(),
- },
+ time: 2,
+ difficulty_factor: 2,
+ worker_type: "wasm".into(),
+ },
CreatePerformanceAnalytics {
- time: 3,
- difficulty_factor: 3,
- worker_type: "wasm".into(),
- },
-
+ time: 3,
+ difficulty_factor: 3,
+ worker_type: "wasm".into(),
+ },
CreatePerformanceAnalytics {
- time: 4,
- difficulty_factor: 4,
- worker_type: "wasm".into(),
- },
-
+ time: 4,
+ difficulty_factor: 4,
+ worker_type: "wasm".into(),
+ },
CreatePerformanceAnalytics {
- time: 5,
- difficulty_factor: 5,
- worker_type: "wasm".into(),
- },
+ time: 5,
+ difficulty_factor: 5,
+ worker_type: "wasm".into(),
+ },
];
for a in rest_analytics.iter() {
db.analysis_save(c.key, &a).await.unwrap();
}
- assert!(db.stats_get_entry_at_location_for_time_limit_asc(1, 2).await.unwrap().is_none());
- assert_eq!(db.stats_get_entry_at_location_for_time_limit_asc(2, 1).await.unwrap(), Some(2));
- assert_eq!(db.stats_get_entry_at_location_for_time_limit_asc(3, 2).await.unwrap(), Some(3));
-
+ assert!(db
+ .stats_get_entry_at_location_for_time_limit_asc(1, 2)
+ .await
+ .unwrap()
+ .is_none());
+ assert_eq!(
+ db.stats_get_entry_at_location_for_time_limit_asc(2, 1)
+ .await
+ .unwrap(),
+ Some(2)
+ );
+ assert_eq!(
+ db.stats_get_entry_at_location_for_time_limit_asc(3, 2)
+ .await
+ .unwrap(),
+ Some(3)
+ );
db.analytics_delete_all_records_for_campaign(c.key)
.await
diff --git a/db/db-sqlx-maria/src/lib.rs b/db/db-sqlx-maria/src/lib.rs
index 95b256ad..3c0467ef 100644
--- a/db/db-sqlx-maria/src/lib.rs
+++ b/db/db-sqlx-maria/src/lib.rs
@@ -1220,37 +1220,36 @@ impl MCDatabase for Database {
}
}
-
-
/// Get number of analytics entries that are under a certain duration
async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> {
-
- struct Count {
- count: Option<i64>,
- }
+ struct Count {
+ count: Option<i64>,
+ }
//"SELECT COUNT(*) FROM (SELECT difficulty_factor FROM mcaptcha_pow_analytics WHERE time <= ?) as count",
- let count = sqlx::query_as!(
- Count,
+ let count = sqlx::query_as!(
+ Count,
"SELECT
COUNT(difficulty_factor) AS count
FROM
mcaptcha_pow_analytics
WHERE time <= ?;",
- duration as i32,
- )
- .fetch_one(&self.pool)
- .await
- .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
+ duration as i32,
+ )
+ .fetch_one(&self.pool)
+ .await
+ .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(count.count.unwrap_or_else(|| 0) as usize)
}
/// Get the entry at a location in the list of analytics entires under a certain time limited
/// and sorted in ascending order
- async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>> {
-
-
+ async fn stats_get_entry_at_location_for_time_limit_asc(
+ &self,
+ duration: u32,
+ location: u32,
+ ) -> DBResult<Option<usize>> {
struct Difficulty {
difficulty_factor: Option<i32>,
}
@@ -1272,10 +1271,8 @@ impl MCDatabase for Database {
{
Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)),
Err(sqlx::Error::RowNotFound) => Ok(None),
- Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound))
-
+ Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound)),
}
-
}
}
diff --git a/db/db-sqlx-maria/src/tests.rs b/db/db-sqlx-maria/src/tests.rs
index 030b7eb8..3bae1fee 100644
--- a/db/db-sqlx-maria/src/tests.rs
+++ b/db/db-sqlx-maria/src/tests.rs
@@ -7,7 +7,7 @@
use std::env;
-use sqlx::{mysql::MySqlPoolOptions, migrate::MigrateDatabase};
+use sqlx::{migrate::MigrateDatabase, mysql::MySqlPoolOptions};
use url::Url;
use crate::*;
diff --git a/db/db-sqlx-postgres/src/lib.rs b/db/db-sqlx-postgres/src/lib.rs
index 79cbbccf..f76c9f6b 100644
--- a/db/db-sqlx-postgres/src/lib.rs
+++ b/db/db-sqlx-postgres/src/lib.rs
@@ -1230,28 +1230,29 @@ impl MCDatabase for Database {
/// Get number of analytics entries that are under a certain duration
async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> {
+ struct Count {
+ count: Option<i64>,
+ }
- struct Count {
- count: Option<i64>,
- }
-
- let count = sqlx::query_as!(
+ let count = sqlx::query_as!(
Count,
"SELECT COUNT(difficulty_factor) FROM mcaptcha_pow_analytics WHERE time <= $1;",
duration as i32,
)
- .fetch_one(&self.pool)
- .await
- .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
+ .fetch_one(&self.pool)
+ .await
+ .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(count.count.unwrap_or_else(|| 0) as usize)
}
/// Get the entry at a location in the list of analytics entires under a certain time limit
/// and sorted in ascending order
- async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>> {
-
-
+ async fn stats_get_entry_at_location_for_time_limit_asc(
+ &self,
+ duration: u32,
+ location: u32,
+ ) -> DBResult<Option<usize>> {
struct Difficulty {
difficulty_factor: Option<i32>,
}
@@ -1273,13 +1274,9 @@ impl MCDatabase for Database {
{
Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)),
Err(sqlx::Error::RowNotFound) => Ok(None),
- Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound))
-
+ Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound)),
}
-
-
}
-
}
#[derive(Clone)]
diff --git a/db/db-sqlx-postgres/src/tests.rs b/db/db-sqlx-postgres/src/tests.rs
index 6957182e..27b9108d 100644
--- a/db/db-sqlx-postgres/src/tests.rs
+++ b/db/db-sqlx-postgres/src/tests.rs
@@ -7,8 +7,8 @@
use std::env;
-use sqlx::postgres::PgPoolOptions;
use sqlx::migrate::MigrateDatabase;
+use sqlx::postgres::PgPoolOptions;
use url::Url;
use crate::*;
@@ -47,7 +47,6 @@ async fn everyting_works() {
}
sqlx::Postgres::create_database(&url).await.unwrap();
-
let pool_options = PgPoolOptions::new().max_connections(2);
let connection_options = ConnectionOptions::Fresh(Fresh {
pool_options,
diff --git a/src/tests/mod.rs b/src/tests/mod.rs
index d6673015..927ab0d1 100644
--- a/src/tests/mod.rs
+++ b/src/tests/mod.rs
@@ -29,15 +29,28 @@ pub fn get_settings() -> Settings {
pub mod pg {
use std::env;
+ use sqlx::migrate::MigrateDatabase;
+
use crate::data::Data;
use crate::settings::*;
use crate::survey::SecretsStore;
+ use crate::api::v1::mcaptcha::get_random;
use crate::ArcData;
use super::get_settings;
pub async fn get_data() -> ArcData {
let url = env::var("POSTGRES_DATABASE_URL").unwrap();
+
+ let mut parsed = url::Url::parse(&url).unwrap();
+ parsed.set_path(&get_random(16));
+ let url = parsed.to_string();
+
+ if sqlx::Postgres::database_exists(&url).await.unwrap() {
+ sqlx::Postgres::drop_database(&url).await.unwrap();
+ }
+ sqlx::Postgres::create_database(&url).await.unwrap();
+
let mut settings = get_settings();
settings.captcha.runners = Some(1);
settings.database.url = url.clone();
@@ -50,15 +63,30 @@ pub mod pg {
pub mod maria {
use std::env;
+ use sqlx::migrate::MigrateDatabase;
+
use crate::data::Data;
use crate::settings::*;
use crate::survey::SecretsStore;
use crate::ArcData;
+ use crate::api::v1::mcaptcha::get_random;
use super::get_settings;
pub async fn get_data() -> ArcData {
let url = env::var("MARIA_DATABASE_URL").unwrap();
+
+
+
+ let mut parsed = url::Url::parse(&url).unwrap();
+ parsed.set_path(&get_random(16));
+ let url = parsed.to_string();
+
+ if sqlx::MySql::database_exists(&url).await.unwrap() {
+ sqlx::MySql::drop_database(&url).await.unwrap();
+ }
+ sqlx::MySql::create_database(&url).await.unwrap();
+
let mut settings = get_settings();
settings.captcha.runners = Some(1);
settings.database.url = url.clone();