feat: create individual databases for each test

This commit is contained in:
Aravinth Manivannan 2023-11-05 00:49:06 +05:30
parent 36600e2f13
commit 321fd2e89b
No known key found for this signature in database
GPG key ID: F8F50389936984FF
7 changed files with 134 additions and 85 deletions

View file

@ -313,8 +313,11 @@ pub trait MCDatabase: std::marker::Send + std::marker::Sync + CloneSPDatabase {
/// Get the entry at a location in the list of analytics entries under a certain time limit /// Get the entry at a location in the list of analytics entries under a certain time limit
/// and sorted in ascending order /// and sorted in ascending order
async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>>; async fn stats_get_entry_at_location_for_time_limit_asc(
&self,
duration: u32,
location: u32,
) -> DBResult<Option<usize>>;
} }
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)] #[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]

View file

@ -309,12 +309,26 @@ pub async fn database_works<'a, T: MCDatabase>(
worker_type: "wasm".into(), worker_type: "wasm".into(),
}; };
assert_eq!(
assert_eq!(db.stats_get_num_logs_under_time(analytics.time).await.unwrap(), 0); db.stats_get_num_logs_under_time(analytics.time)
.await
.unwrap(),
0
);
db.analysis_save(c.key, &analytics).await.unwrap(); db.analysis_save(c.key, &analytics).await.unwrap();
assert_eq!(db.stats_get_num_logs_under_time(analytics.time).await.unwrap(), 1); assert_eq!(
assert_eq!(db.stats_get_num_logs_under_time(analytics.time - 1).await.unwrap(), 0); db.stats_get_num_logs_under_time(analytics.time)
.await
.unwrap(),
1
);
assert_eq!(
db.stats_get_num_logs_under_time(analytics.time - 1)
.await
.unwrap(),
0
);
let limit = 50; let limit = 50;
let mut offset = 0; let mut offset = 0;
let a = db.analytics_fetch(c.key, limit, offset).await.unwrap(); let a = db.analytics_fetch(c.key, limit, offset).await.unwrap();
@ -345,13 +359,11 @@ pub async fn database_works<'a, T: MCDatabase>(
difficulty_factor: 3, difficulty_factor: 3,
worker_type: "wasm".into(), worker_type: "wasm".into(),
}, },
CreatePerformanceAnalytics { CreatePerformanceAnalytics {
time: 4, time: 4,
difficulty_factor: 4, difficulty_factor: 4,
worker_type: "wasm".into(), worker_type: "wasm".into(),
}, },
CreatePerformanceAnalytics { CreatePerformanceAnalytics {
time: 5, time: 5,
difficulty_factor: 5, difficulty_factor: 5,
@ -361,10 +373,23 @@ pub async fn database_works<'a, T: MCDatabase>(
for a in rest_analytics.iter() { for a in rest_analytics.iter() {
db.analysis_save(c.key, &a).await.unwrap(); db.analysis_save(c.key, &a).await.unwrap();
} }
assert!(db.stats_get_entry_at_location_for_time_limit_asc(1, 2).await.unwrap().is_none()); assert!(db
assert_eq!(db.stats_get_entry_at_location_for_time_limit_asc(2, 1).await.unwrap(), Some(2)); .stats_get_entry_at_location_for_time_limit_asc(1, 2)
assert_eq!(db.stats_get_entry_at_location_for_time_limit_asc(3, 2).await.unwrap(), Some(3)); .await
.unwrap()
.is_none());
assert_eq!(
db.stats_get_entry_at_location_for_time_limit_asc(2, 1)
.await
.unwrap(),
Some(2)
);
assert_eq!(
db.stats_get_entry_at_location_for_time_limit_asc(3, 2)
.await
.unwrap(),
Some(3)
);
db.analytics_delete_all_records_for_campaign(c.key) db.analytics_delete_all_records_for_campaign(c.key)
.await .await

View file

@ -1220,11 +1220,8 @@ impl MCDatabase for Database {
} }
} }
/// Get number of analytics entries that are under a certain duration /// Get number of analytics entries that are under a certain duration
async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> { async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> {
struct Count { struct Count {
count: Option<i64>, count: Option<i64>,
} }
@ -1248,9 +1245,11 @@ impl MCDatabase for Database {
/// Get the entry at a location in the list of analytics entries under a certain time limited /// Get the entry at a location in the list of analytics entries under a certain time limited
/// and sorted in ascending order /// and sorted in ascending order
async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>> { async fn stats_get_entry_at_location_for_time_limit_asc(
&self,
duration: u32,
location: u32,
) -> DBResult<Option<usize>> {
struct Difficulty { struct Difficulty {
difficulty_factor: Option<i32>, difficulty_factor: Option<i32>,
} }
@ -1272,10 +1271,8 @@ impl MCDatabase for Database {
{ {
Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)), Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)),
Err(sqlx::Error::RowNotFound) => Ok(None), Err(sqlx::Error::RowNotFound) => Ok(None),
Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound)) Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound)),
} }
} }
} }

View file

@ -7,7 +7,7 @@
use std::env; use std::env;
use sqlx::{mysql::MySqlPoolOptions, migrate::MigrateDatabase}; use sqlx::{migrate::MigrateDatabase, mysql::MySqlPoolOptions};
use url::Url; use url::Url;
use crate::*; use crate::*;

View file

@ -1230,7 +1230,6 @@ impl MCDatabase for Database {
/// Get number of analytics entries that are under a certain duration /// Get number of analytics entries that are under a certain duration
async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> { async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> {
struct Count { struct Count {
count: Option<i64>, count: Option<i64>,
} }
@ -1249,9 +1248,11 @@ impl MCDatabase for Database {
/// Get the entry at a location in the list of analytics entries under a certain time limit /// Get the entry at a location in the list of analytics entries under a certain time limit
/// and sorted in ascending order /// and sorted in ascending order
async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>> { async fn stats_get_entry_at_location_for_time_limit_asc(
&self,
duration: u32,
location: u32,
) -> DBResult<Option<usize>> {
struct Difficulty { struct Difficulty {
difficulty_factor: Option<i32>, difficulty_factor: Option<i32>,
} }
@ -1273,13 +1274,9 @@ impl MCDatabase for Database {
{ {
Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)), Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)),
Err(sqlx::Error::RowNotFound) => Ok(None), Err(sqlx::Error::RowNotFound) => Ok(None),
Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound)) Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound)),
} }
} }
} }
#[derive(Clone)] #[derive(Clone)]

View file

@ -7,8 +7,8 @@
use std::env; use std::env;
use sqlx::postgres::PgPoolOptions;
use sqlx::migrate::MigrateDatabase; use sqlx::migrate::MigrateDatabase;
use sqlx::postgres::PgPoolOptions;
use url::Url; use url::Url;
use crate::*; use crate::*;
@ -47,7 +47,6 @@ async fn everyting_works() {
} }
sqlx::Postgres::create_database(&url).await.unwrap(); sqlx::Postgres::create_database(&url).await.unwrap();
let pool_options = PgPoolOptions::new().max_connections(2); let pool_options = PgPoolOptions::new().max_connections(2);
let connection_options = ConnectionOptions::Fresh(Fresh { let connection_options = ConnectionOptions::Fresh(Fresh {
pool_options, pool_options,

View file

@ -29,15 +29,28 @@ pub fn get_settings() -> Settings {
pub mod pg { pub mod pg {
use std::env; use std::env;
use sqlx::migrate::MigrateDatabase;
use crate::data::Data; use crate::data::Data;
use crate::settings::*; use crate::settings::*;
use crate::survey::SecretsStore; use crate::survey::SecretsStore;
use crate::api::v1::mcaptcha::get_random;
use crate::ArcData; use crate::ArcData;
use super::get_settings; use super::get_settings;
pub async fn get_data() -> ArcData { pub async fn get_data() -> ArcData {
let url = env::var("POSTGRES_DATABASE_URL").unwrap(); let url = env::var("POSTGRES_DATABASE_URL").unwrap();
let mut parsed = url::Url::parse(&url).unwrap();
parsed.set_path(&get_random(16));
let url = parsed.to_string();
if sqlx::Postgres::database_exists(&url).await.unwrap() {
sqlx::Postgres::drop_database(&url).await.unwrap();
}
sqlx::Postgres::create_database(&url).await.unwrap();
let mut settings = get_settings(); let mut settings = get_settings();
settings.captcha.runners = Some(1); settings.captcha.runners = Some(1);
settings.database.url = url.clone(); settings.database.url = url.clone();
@ -50,15 +63,30 @@ pub mod pg {
pub mod maria { pub mod maria {
use std::env; use std::env;
use sqlx::migrate::MigrateDatabase;
use crate::data::Data; use crate::data::Data;
use crate::settings::*; use crate::settings::*;
use crate::survey::SecretsStore; use crate::survey::SecretsStore;
use crate::ArcData; use crate::ArcData;
use crate::api::v1::mcaptcha::get_random;
use super::get_settings; use super::get_settings;
pub async fn get_data() -> ArcData { pub async fn get_data() -> ArcData {
let url = env::var("MARIA_DATABASE_URL").unwrap(); let url = env::var("MARIA_DATABASE_URL").unwrap();
let mut parsed = url::Url::parse(&url).unwrap();
parsed.set_path(&get_random(16));
let url = parsed.to_string();
if sqlx::MySql::database_exists(&url).await.unwrap() {
sqlx::MySql::drop_database(&url).await.unwrap();
}
sqlx::MySql::create_database(&url).await.unwrap();
let mut settings = get_settings(); let mut settings = get_settings();
settings.captcha.runners = Some(1); settings.captcha.runners = Some(1);
settings.database.url = url.clone(); settings.database.url = url.clone();