feat: database methods to compute percentiles on analysis records
parent 606d22cc9d
commit 36600e2f13
11 changed files with 313 additions and 52 deletions

@@ -1,2 +1,2 @@
 export POSTGRES_DATABASE_URL="postgres://postgres:password@localhost:5432/postgres"
-export MARIA_DATABASE_URL="mysql://maria:password@localhost:3306/maria"
+export MARIA_DATABASE_URL="mysql://root:password@localhost:3306/maria"

@@ -307,6 +307,14 @@ pub trait MCDatabase: std::marker::Send + std::marker::Sync + CloneSPDatabase {
         captcha_key: &str,
         difficulty_factor: u32,
     ) -> DBResult<u32>;
+
+    /// Get number of analytics entries that are under a certain duration
+    async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize>;
+
+    /// Get the entry at a location in the list of analytics entries under a certain time limit
+    /// and sorted in ascending order
+    async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>>;
+
 }
 
 #[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]

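Taken together, these two methods are what the percentile computation in the commit title rests on: count how many analytics entries finished within a duration, then index into that list sorted by difficulty factor. The helper below is an editorial sketch of that idea, not code from this commit; the function name and the nearest-rank rounding are assumptions.

// Editorial sketch, not part of this commit: reads the difficulty factor at a
// given percentile among analytics entries recorded with `time <= duration`,
// using only the two new MCDatabase methods. Nearest-rank percentile:
// location = ceil(count * percentile / 100), counting from 1.
async fn difficulty_at_percentile<T: MCDatabase>(
    db: &T,
    duration: u32,
    percentile: u32, // e.g. 95 for p95
) -> DBResult<Option<usize>> {
    let count = db.stats_get_num_logs_under_time(duration).await?;
    if count == 0 {
        return Ok(None);
    }
    // Integer ceiling division, clamped to at least the first entry.
    let location = ((count * percentile as usize + 99) / 100).max(1) as u32;
    db.stats_get_entry_at_location_for_time_limit_asc(duration, location)
        .await
}
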
@@ -7,6 +7,29 @@
 use crate::errors::*;
 use crate::prelude::*;
 
+/// easy traffic pattern
+pub const TRAFFIC_PATTERN: TrafficPattern = TrafficPattern {
+    avg_traffic: 500,
+    peak_sustainable_traffic: 5_000,
+    broke_my_site_traffic: Some(10_000),
+};
+
+/// levels for complex captcha config
+pub const LEVELS: [Level; 3] = [
+    Level {
+        difficulty_factor: 1,
+        visitor_threshold: 1,
+    },
+    Level {
+        difficulty_factor: 2,
+        visitor_threshold: 2,
+    },
+    Level {
+        difficulty_factor: 3,
+        visitor_threshold: 3,
+    },
+];
+
 /// test all database functions
 pub async fn database_works<'a, T: MCDatabase>(
     db: &T,

@@ -250,7 +273,6 @@ pub async fn database_works<'a, T: MCDatabase>(
     db.record_confirm(c.key).await.unwrap();
 
     // analytics start
-
     db.analytics_create_psuedo_id_if_not_exists(c.key)
         .await
        .unwrap();

@@ -282,11 +304,17 @@ pub async fn database_works<'a, T: MCDatabase>(
     );
 
     let analytics = CreatePerformanceAnalytics {
-        time: 0,
-        difficulty_factor: 0,
+        time: 1,
+        difficulty_factor: 1,
         worker_type: "wasm".into(),
     };
+
+
+    assert_eq!(db.stats_get_num_logs_under_time(analytics.time).await.unwrap(), 0);
+
     db.analysis_save(c.key, &analytics).await.unwrap();
+    assert_eq!(db.stats_get_num_logs_under_time(analytics.time).await.unwrap(), 1);
+    assert_eq!(db.stats_get_num_logs_under_time(analytics.time - 1).await.unwrap(), 0);
     let limit = 50;
     let mut offset = 0;
     let a = db.analytics_fetch(c.key, limit, offset).await.unwrap();

@@ -305,6 +333,39 @@ pub async fn database_works<'a, T: MCDatabase>(
         .unwrap();
     assert_eq!(db.analytics_fetch(c.key, 1000, 0).await.unwrap().len(), 0);
     assert!(!db.analytics_captcha_is_published(c.key).await.unwrap());
+
+    let rest_analytics = [
+        CreatePerformanceAnalytics {
+            time: 2,
+            difficulty_factor: 2,
+            worker_type: "wasm".into(),
+        },
+        CreatePerformanceAnalytics {
+            time: 3,
+            difficulty_factor: 3,
+            worker_type: "wasm".into(),
+        },
+
+        CreatePerformanceAnalytics {
+            time: 4,
+            difficulty_factor: 4,
+            worker_type: "wasm".into(),
+        },
+
+        CreatePerformanceAnalytics {
+            time: 5,
+            difficulty_factor: 5,
+            worker_type: "wasm".into(),
+        },
+    ];
+    for a in rest_analytics.iter() {
+        db.analysis_save(c.key, &a).await.unwrap();
+    }
+    assert!(db.stats_get_entry_at_location_for_time_limit_asc(1, 2).await.unwrap().is_none());
+    assert_eq!(db.stats_get_entry_at_location_for_time_limit_asc(2, 1).await.unwrap(), Some(2));
+    assert_eq!(db.stats_get_entry_at_location_for_time_limit_asc(3, 2).await.unwrap(), Some(3));
+
+
     db.analytics_delete_all_records_for_campaign(c.key)
         .await
         .unwrap();

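A short editorial note on these assertions, since the expected values are easy to misread: judging by the asserted results, only the four entries saved just above are counted by the stats queries at this point (the earlier time-1 record is evidently gone by now), and `location` counts from 1.

// Editorial reading of the three assertions above (not part of the commit),
// assuming the visible entries are (time, difficulty) = (2,2), (3,3), (4,4), (5,5):
//
//   stats_get_entry_at_location_for_time_limit_asc(1, 2) -> Ok(None)
//       no entry with time <= 1 exists at location 2
//   stats_get_entry_at_location_for_time_limit_asc(2, 1) -> Ok(Some(2))
//       time <= 2 leaves difficulty 2; location 1 is the smallest entry
//   stats_get_entry_at_location_for_time_limit_asc(3, 2) -> Ok(Some(3))
//       time <= 3 leaves difficulties {2, 3}; location 2 is the second one
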
@@ -0,0 +1,25 @@
+{
+  "db_name": "MySQL",
+  "query": "SELECT\n COUNT(difficulty_factor) AS count\n FROM\n mcaptcha_pow_analytics\n WHERE time <= ?;",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "count",
+        "type_info": {
+          "type": "LongLong",
+          "flags": "NOT_NULL | BINARY",
+          "char_set": 63,
+          "max_size": 21
+        }
+      }
+    ],
+    "parameters": {
+      "Right": 1
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "9bae79667a8cc631541879321e72a40f20cf812584aaf44418089bc7a51e07c4"
+}

@@ -0,0 +1,25 @@
+{
+  "db_name": "MySQL",
+  "query": "SELECT\n difficulty_factor\n FROM\n mcaptcha_pow_analytics\n WHERE\n time <= ?\n ORDER BY difficulty_factor ASC LIMIT 1 OFFSET ?;",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "difficulty_factor",
+        "type_info": {
+          "type": "Long",
+          "flags": "NOT_NULL | NO_DEFAULT_VALUE",
+          "char_set": 63,
+          "max_size": 11
+        }
+      }
+    ],
+    "parameters": {
+      "Right": 2
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "c4d6ad934e38218931e74ae1c31c6712cbadb40f31bb12e160c9d333c7e3835c"
+}

@@ -1219,6 +1219,64 @@ impl MCDatabase for Database {
             Ok(res.nonce as u32)
         }
     }
+
+
+
+    /// Get number of analytics entries that are under a certain duration
+    async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> {
+
+        struct Count {
+            count: Option<i64>,
+        }
+
+        //"SELECT COUNT(*) FROM (SELECT difficulty_factor FROM mcaptcha_pow_analytics WHERE time <= ?) as count",
+        let count = sqlx::query_as!(
+            Count,
+            "SELECT
+                COUNT(difficulty_factor) AS count
+            FROM
+                mcaptcha_pow_analytics
+            WHERE time <= ?;",
+            duration as i32,
+        )
+        .fetch_one(&self.pool)
+        .await
+        .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
+
+        Ok(count.count.unwrap_or_else(|| 0) as usize)
+    }
+
+    /// Get the entry at a location in the list of analytics entries under a certain time limit
+    /// and sorted in ascending order
+    async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>> {
+
+
+        struct Difficulty {
+            difficulty_factor: Option<i32>,
+        }
+
+        match sqlx::query_as!(
+            Difficulty,
+            "SELECT
+                difficulty_factor
+            FROM
+                mcaptcha_pow_analytics
+            WHERE
+                time <= ?
+            ORDER BY difficulty_factor ASC LIMIT 1 OFFSET ?;",
+            duration as i32,
+            location as i64 - 1,
+        )
+        .fetch_one(&self.pool)
+        .await
+        {
+            Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)),
+            Err(sqlx::Error::RowNotFound) => Ok(None),
+            Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound))
+
+        }
+
+    }
 }
 
 #[derive(Clone)]

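One detail worth calling out in both back ends: the SQL binds `location as i64 - 1` as the OFFSET, so `location` is 1-based, and a value of 0 would bind a negative offset rather than returning `Ok(None)`. The guard below is only an editorial sketch of how a caller could keep that contract explicit; it is not part of this commit.

// Editorial sketch, not part of this commit: keep the 1-based `location`
// contract explicit at the call site instead of binding OFFSET -1.
async fn entry_at_location<T: MCDatabase>(
    db: &T,
    duration: u32,
    location: u32,
) -> DBResult<Option<usize>> {
    if location == 0 {
        // Treat an out-of-range location the same way a too-large one is
        // treated by the implementations: no entry.
        return Ok(None);
    }
    db.stats_get_entry_at_location_for_time_limit_asc(duration, location)
        .await
}
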
@@ -5,9 +5,11 @@
 
 #![cfg(test)]
 
-use sqlx::mysql::MySqlPoolOptions;
 use std::env;
 
+use sqlx::{mysql::MySqlPoolOptions, migrate::MigrateDatabase};
+use url::Url;
+
 use crate::*;
 
 use db_core::tests::*;

@@ -26,28 +28,6 @@ async fn everyting_works() {
     const HEADING: &str = "testing notifications get db mariadb";
     const MESSAGE: &str = "testing notifications get message db mariadb";
 
-    // easy traffic pattern
-    const TRAFFIC_PATTERN: TrafficPattern = TrafficPattern {
-        avg_traffic: 500,
-        peak_sustainable_traffic: 5_000,
-        broke_my_site_traffic: Some(10_000),
-    };
-
-    const LEVELS: [Level; 3] = [
-        Level {
-            difficulty_factor: 1,
-            visitor_threshold: 1,
-        },
-        Level {
-            difficulty_factor: 2,
-            visitor_threshold: 2,
-        },
-        Level {
-            difficulty_factor: 3,
-            visitor_threshold: 3,
-        },
-    ];
-
     const ADD_NOTIFICATION: AddNotification = AddNotification {
         from: NAME,
         to: NAME,

@@ -56,10 +36,20 @@ async fn everyting_works() {
     };
 
     let url = env::var("MARIA_DATABASE_URL").unwrap();
+
+    let mut parsed = Url::parse(&url).unwrap();
+    parsed.set_path("db_maria_test");
+    let url = parsed.to_string();
+
+    if sqlx::MySql::database_exists(&url).await.unwrap() {
+        sqlx::MySql::drop_database(&url).await.unwrap();
+    }
+    sqlx::MySql::create_database(&url).await.unwrap();
+
     let pool_options = MySqlPoolOptions::new().max_connections(2);
     let connection_options = ConnectionOptions::Fresh(Fresh {
         pool_options,
-        url,
+        url: url.clone(),
         disable_logging: false,
     });
     let db = connection_options.connect().await.unwrap();

@@ -78,4 +68,6 @@ async fn everyting_works() {
         description: CAPTCHA_DESCRIPTION,
     };
     database_works(&db, &p, &c, &LEVELS, &TRAFFIC_PATTERN, &ADD_NOTIFICATION).await;
+    drop(db);
+    sqlx::MySql::drop_database(&url).await.unwrap();
 }

@@ -0,0 +1,22 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "SELECT COUNT(difficulty_factor) FROM mcaptcha_pow_analytics WHERE time <= $1;",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "count",
+        "type_info": "Int8"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Int4"
+      ]
+    },
+    "nullable": [
+      null
+    ]
+  },
+  "hash": "c08c1dd4bfcb6cbd0359c79cc3be79526a012b006ce9deb80bceb4e1a04c835d"
+}

@@ -0,0 +1,23 @@
+{
+  "db_name": "PostgreSQL",
+  "query": "SELECT\n difficulty_factor\n FROM\n mcaptcha_pow_analytics\n WHERE\n time <= $1\n ORDER BY difficulty_factor ASC LIMIT 1 OFFSET $2;",
+  "describe": {
+    "columns": [
+      {
+        "ordinal": 0,
+        "name": "difficulty_factor",
+        "type_info": "Int4"
+      }
+    ],
+    "parameters": {
+      "Left": [
+        "Int4",
+        "Int8"
+      ]
+    },
+    "nullable": [
+      false
+    ]
+  },
+  "hash": "c67aec0c3d5786fb495b6ed60fa106437d8e5034d3a40bf8face2ca7c12f2694"
+}

@@ -1227,6 +1227,59 @@ impl MCDatabase for Database {
             Ok(res.nonce as u32)
         }
     }
+
+    /// Get number of analytics entries that are under a certain duration
+    async fn stats_get_num_logs_under_time(&self, duration: u32) -> DBResult<usize> {
+
+        struct Count {
+            count: Option<i64>,
+        }
+
+        let count = sqlx::query_as!(
+            Count,
+            "SELECT COUNT(difficulty_factor) FROM mcaptcha_pow_analytics WHERE time <= $1;",
+            duration as i32,
+        )
+        .fetch_one(&self.pool)
+        .await
+        .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
+
+        Ok(count.count.unwrap_or_else(|| 0) as usize)
+    }
+
+    /// Get the entry at a location in the list of analytics entries under a certain time limit
+    /// and sorted in ascending order
+    async fn stats_get_entry_at_location_for_time_limit_asc(&self, duration: u32, location: u32) -> DBResult<Option<usize>> {
+
+
+        struct Difficulty {
+            difficulty_factor: Option<i32>,
+        }
+
+        match sqlx::query_as!(
+            Difficulty,
+            "SELECT
+                difficulty_factor
+            FROM
+                mcaptcha_pow_analytics
+            WHERE
+                time <= $1
+            ORDER BY difficulty_factor ASC LIMIT 1 OFFSET $2;",
+            duration as i32,
+            location as i64 - 1,
+        )
+        .fetch_one(&self.pool)
+        .await
+        {
+            Ok(res) => Ok(Some(res.difficulty_factor.unwrap() as usize)),
+            Err(sqlx::Error::RowNotFound) => Ok(None),
+            Err(e) => Err(map_row_not_found_err(e, DBError::CaptchaNotFound))
+
+        }
+
+
+    }
+
 }
 
 #[derive(Clone)]

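A small difference between the two back ends shows up in the generated sqlx metadata above: the MySQL count column is marked NOT_NULL, while the Postgres one is reported with unknown nullability ("nullable": [null]), apparently because sqlx cannot prove that a bare COUNT(...) is non-NULL there. That is why the shared `Count { count: Option<i64> }` shape with a zero fallback appears in both implementations. The snippet below just restates that handling in isolation and is not code from this commit.

// Editorial restatement, not code from this commit: the Postgres metadata
// reports the COUNT column with unknown nullability, so sqlx maps it to
// Option<i64>; both implementations treat a missing count as zero rows.
struct Count {
    count: Option<i64>,
}

fn logs_under_time(row: Count) -> usize {
    row.count.unwrap_or(0) as usize
}
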
@@ -5,9 +5,12 @@
 
 #![cfg(test)]
 
-use sqlx::postgres::PgPoolOptions;
 use std::env;
 
+use sqlx::postgres::PgPoolOptions;
+use sqlx::migrate::MigrateDatabase;
+use url::Url;
+
 use crate::*;
 
 use db_core::tests::*;

@@ -26,28 +29,6 @@ async fn everyting_works() {
     const HEADING: &str = "testing notifications get db postgres";
     const MESSAGE: &str = "testing notifications get message db postgres";
 
-    // easy traffic pattern
-    const TRAFFIC_PATTERN: TrafficPattern = TrafficPattern {
-        avg_traffic: 500,
-        peak_sustainable_traffic: 5_000,
-        broke_my_site_traffic: Some(10_000),
-    };
-
-    const LEVELS: [Level; 3] = [
-        Level {
-            difficulty_factor: 1,
-            visitor_threshold: 1,
-        },
-        Level {
-            difficulty_factor: 2,
-            visitor_threshold: 2,
-        },
-        Level {
-            difficulty_factor: 3,
-            visitor_threshold: 3,
-        },
-    ];
-
     const ADD_NOTIFICATION: AddNotification = AddNotification {
         from: NAME,
         to: NAME,

@@ -56,10 +37,21 @@ async fn everyting_works() {
     };
 
     let url = env::var("POSTGRES_DATABASE_URL").unwrap();
+
+    let mut parsed = Url::parse(&url).unwrap();
+    parsed.set_path("db_postgres_test");
+    let url = parsed.to_string();
+
+    if sqlx::Postgres::database_exists(&url).await.unwrap() {
+        sqlx::Postgres::drop_database(&url).await.unwrap();
+    }
+    sqlx::Postgres::create_database(&url).await.unwrap();
+
+
     let pool_options = PgPoolOptions::new().max_connections(2);
     let connection_options = ConnectionOptions::Fresh(Fresh {
         pool_options,
-        url,
+        url: url.clone(),
         disable_logging: false,
     });
     let db = connection_options.connect().await.unwrap();

@@ -78,4 +70,6 @@ async fn everyting_works() {
        description: CAPTCHA_DESCRIPTION,
     };
     database_works(&db, &p, &c, &LEVELS, &TRAFFIC_PATTERN, &ADD_NOTIFICATION).await;
+    drop(db);
+    sqlx::Postgres::drop_database(&url).await.unwrap();
 }