Merge pull request #93 from mCaptcha/pow-live-stats

Pow live stats
This commit is contained in:
Aravinth Manivannan 2023-07-02 23:22:05 +05:30 committed by GitHub
commit b2f373dffd
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
44 changed files with 1348 additions and 82 deletions

View file

@ -52,7 +52,7 @@ jobs:
maria:
image: mariadb
image: mariadb:10
env:
MARIADB_USER: "maria"
MARIADB_PASSWORD: "password"
@ -103,7 +103,7 @@ jobs:
run: make frontend
- name: Run the frontend tests
run: make frontend-test
run: make test.frontend
- name: Run migrations
run: make migrate

View file

@ -54,7 +54,7 @@ jobs:
- 10025:1025
maria:
image: mariadb
image: mariadb:10
env:
MARIADB_USER: "maria"
MARIADB_PASSWORD: "password"

17
Cargo.lock generated
View file

@ -885,6 +885,7 @@ dependencies = [
"futures",
"sqlx",
"url",
"uuid",
]
[[package]]
@ -897,6 +898,7 @@ dependencies = [
"futures",
"sqlx",
"url",
"uuid",
]
[[package]]
@ -1580,8 +1582,8 @@ checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565"
[[package]]
name = "libmcaptcha"
version = "0.2.2"
source = "git+https://github.com/mCaptcha/libmcaptcha?branch=master#22e3cb1a1c774ed352da17e4919e1ee177264e07"
version = "0.2.3"
source = "git+https://github.com/mCaptcha/libmcaptcha?tag=0.2.3#6bc0d11f8839d18c4d930cac0c0b86505ed1b2f6"
dependencies = [
"actix",
"crossbeam-channel",
@ -1722,6 +1724,7 @@ dependencies = [
"tokio",
"url",
"urlencoding",
"uuid",
"validator",
]
@ -3392,6 +3395,16 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "uuid"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be"
dependencies = [
"getrandom",
"serde 1.0.143",
]
[[package]]
name = "validator"
version = "0.15.0"

View file

@ -59,8 +59,8 @@ log = "0.4"
lazy_static = "1.4"
#libmcaptcha = { version = "0.2.2", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"], tag ="0.2.2" }
libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
libmcaptcha = { version = "0.2.3", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"], tag ="0.2.3" }
#libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
#libmcaptcha = { path = "../libmcaptcha", features = ["full"]}
rand = "0.8"
@ -79,6 +79,7 @@ lettre = { version = "0.10.0-rc.3", features = [
]}
openssl = { version = "0.10.48", features = ["vendored"] }
uuid = { version = "1.4.0", features = ["v4", "serde"] }
[dependencies.db-core]

135
Makefile
View file

@ -2,6 +2,39 @@ BUNDLE = static/cache/bundle
OPENAPI = docs/openapi
CLEAN_UP = $(BUNDLE) src/cache_buster_data.json assets
define deploy_dependencies ## deploy dependencies
@-docker create --name ${db} \
-e POSTGRES_PASSWORD=password \
-p 5432:5432 \
postgres
@-docker create \
-p 3306:3306 \
--name ${mdb} \
--env MARIADB_USER=maria \
--env MARIADB_PASSWORD=password \
--env MARIADB_ROOT_PASSWORD=password \
--env MARIADB_DATABASE=maria \
mariadb:latest
@-docker create \
-p 6379:6379 \
--name mcaptcha-cache \
mcaptcha/cache:latest
docker start ${db}
docker start ${mdb}
docker start mcaptcha-cache
endef
define run_migrations ## run database migrations
cd db/db-migrations/ && cargo run
endef
define run_dev_migrations ## run database migrations
cd db/db-sqlx-maria/ && \
DATABASE_URL=${MARIA_DATABASE_URL} sqlx migrate run
cd db/db-sqlx-postgres/ && \
DATABASE_URL=${POSTGRES_DATABASE_URL} sqlx migrate run
endef
define frontend_env ## install frontend deps
yarn install
cd docs/openapi && yarn install
@ -11,6 +44,30 @@ define cache_bust ## run cache_busting program
cd utils/cache-bust && cargo run
endef
define test_frontend ## run frontend tests
cd $(OPENAPI)&& yarn test
yarn test
endef
define test_db_sqlx_postgres
cd db/db-sqlx-postgres &&\
DATABASE_URL=${POSTGRES_DATABASE_URL}\
cargo test --no-fail-fast
endef
define test_db_sqlx_maria
cd db/db-sqlx-maria &&\
DATABASE_URL=${MARIA_DATABASE_URL}\
cargo test --no-fail-fast
endef
define test_core
cargo test --no-fail-fast
endef
default: frontend ## Build app in debug mode
$(call cache_bust)
cargo build
@ -35,10 +92,6 @@ clean: ## Delete build artifacts
@yarn cache clean
@-rm $(CLEAN_UP)
coverage: migrate ## Generate code coverage report in HTML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Html
doc: ## Generate documentation
#yarn doc
cargo doc --no-deps --workspace --all-features
@ -54,6 +107,19 @@ env: ## Setup development environtment
cargo fetch
$(call frontend_env)
env.db: ## Deploy dependencies
$(call deploy_dependencies)
sleep 5
$(call run_migrations)
env.db.recreate: ## Deploy dependencies from scratch
@-docker rm -f ${db}
@-docker rm -f ${mdb}
@-docker rm -f mcaptcha-cache
$(call deploy_dependencies)
sleep 5
$(call run_migrations)
frontend-env: ## Install frontend deps
$(call frontend_env)
@ -76,10 +142,6 @@ frontend: ## Build frontend
@./scripts/librejs.sh
@./scripts/cachebust.sh
frontend-test: ## Run frontend tests
cd $(OPENAPI)&& yarn test
yarn test
lint: ## Lint codebase
cargo fmt -v --all -- --emit files
cargo clippy --workspace --tests --all-features
@ -87,7 +149,10 @@ lint: ## Lint codebase
cd $(OPENAPI)&& yarn test
migrate: ## Run database migrations
cd db/db-migrations/ && cargo run
$(call run_migrations)
migrate.dev: ## Run database migrations during development
$(call run_dev_migrations)
release: frontend ## Build app with release optimizations
$(call cache_bust)
@ -98,37 +163,49 @@ run: frontend ## Run app in debug mode
cargo run
sqlx-offline-data: ## prepare sqlx offline data
db.sqlx.offline: ## prepare sqlx offline data
cd db/db-sqlx-postgres && cargo sqlx prepare \
--database-url=${POSTGRES_DATABASE_URL} -- \
--all-features
cd db/db-sqlx-maria && cargo sqlx prepare \
--database-url=${MARIA_DATABASE_URL} -- \
--all-features
# cd db/db-sqlx-sqlite/ \
# && DATABASE_URL=${SQLITE_DATABASE_URL} cargo sqlx prepare
test-db: ## run tests on database
cd db/db-sqlx-postgres &&\
DATABASE_URL=${POSTGRES_DATABASE_URL}\
cargo test --no-fail-fast
test: frontend-test frontend ## Run all available tests
test: frontend ## Run all available tests
$(call test_frontend)
$(call cache_bust)
cd db/db-sqlx-postgres &&\
DATABASE_URL=${POSTGRES_DATABASE_URL}\
cargo test --no-fail-fast
cd db/db-sqlx-maria &&\
DATABASE_URL=${MARIA_DATABASE_URL}\
cargo test --no-fail-fast
cargo test --no-fail-fast
$(call test_db_sqlx_postgres)
$(call test_db_sqlx_maria)
$(call test_core)
# ./scripts/tests.sh
test.cov.html: migrate ## Generate code coverage report in HTML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Html
test.cov.xml: migrate ## Generate code coverage report in XML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Xml
test.core: ## Run all core tests
$(call test_core)
test.db: ## Run all database driver tests
$(call test_db_sqlx_postgres)
$(call test_db_sqlx_maria)
test.db.pg: ## Run Postgres database driver tests
$(call test_db_sqlx_postgres)
test.db.maria: ## Run Maria database driver tests
$(call test_db_sqlx_maria)
test.frontend: ## Run frontend tests
$(call test_frontend)
test.integration: ## run integration tests with nightwatch.js
./scripts/integration.sh
xml-test-coverage: migrate ## Generate code coverage report in XML format
$(call cache_bust)
cargo tarpaulin -t 1200 --out Xml
help: ## Prints help for targets with comments
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
@cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-].+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

View file

@ -21,7 +21,7 @@ use sqlx::types::time::OffsetDateTime;
fn main() {
// note: add error checking yourself.
let output = Command::new("git")
.args(&["rev-parse", "HEAD"])
.args(["rev-parse", "HEAD"])
.output()
.unwrap();
let git_hash = String::from_utf8(output.stdout).unwrap();

View file

@ -13,8 +13,8 @@ async-trait = "0.1.51"
thiserror = "1.0.30"
serde = { version = "1", features = ["derive"]}
url = { version = "2.2.2", features = ["serde"] }
#libmcaptcha = { version = "0.2.2", git = "https://github.com/mCaptcha/libmcaptcha", features = ["minimal"], default-features = false, tag = "0.2.2"}
libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
libmcaptcha = { version = "0.2.3", git = "https://github.com/mCaptcha/libmcaptcha", features = ["minimal"], default-features = false, tag = "0.2.3"}
#libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] }
[features]
default = []

View file

@ -250,6 +250,81 @@ pub trait MCDatabase: std::marker::Send + std::marker::Sync + CloneSPDatabase {
/// fetch PoWConfig confirms
async fn fetch_confirm(&self, user: &str, key: &str) -> DBResult<Vec<i64>>;
/// record PoW timing
async fn analysis_save(
&self,
captcha_id: &str,
d: &CreatePerformanceAnalytics,
) -> DBResult<()>;
/// fetch PoW analytics
async fn analytics_fetch(
&self,
captcha_id: &str,
limit: usize,
offset: usize,
) -> DBResult<Vec<PerformanceAnalytics>>;
/// Create psuedo ID against campaign ID to publish analytics
async fn analytics_create_psuedo_id_if_not_exists(
&self,
captcha_id: &str,
) -> DBResult<()>;
/// Get psuedo ID from campaign ID
async fn analytics_get_psuedo_id_from_capmaign_id(
&self,
captcha_id: &str,
) -> DBResult<String>;
/// Get campaign ID from psuedo ID
async fn analytics_get_capmaign_id_from_psuedo_id(
&self,
psuedo_id: &str,
) -> DBResult<String>;
/// Delete all records for campaign
async fn analytics_delete_all_records_for_campaign(
&self,
campaign_id: &str,
) -> DBResult<()>;
/// Report whether PoW analytics are published for the given captcha/campaign
/// ID. Publication is modelled purely as the existence of a psuedo ID, so a
/// successful lookup maps to `true`, `CaptchaNotFound` maps to `false`, and
/// any other database error is propagated unchanged.
async fn analytics_captcha_is_published(&self, campaign_id: &str) -> DBResult<bool> {
match self
.analytics_get_psuedo_id_from_capmaign_id(campaign_id)
.await
{
Ok(_) => Ok(true),
Err(errors::DBError::CaptchaNotFound) => Ok(false),
Err(e) => Err(e),
}
}
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
/// Payload for recording a single Proof-of-Work CAPTCHA performance
/// measurement (no record ID yet; see `PerformanceAnalytics` for stored rows)
pub struct CreatePerformanceAnalytics {
/// time taken to generate proof (unit not shown in this file — presumably
/// milliseconds; TODO confirm against the client that reports it)
pub time: u32,
/// difficulty factor for which the proof was generated
pub difficulty_factor: u32,
/// worker/client type that produced the proof: wasm, javascript, python, etc.
pub worker_type: String,
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
/// A stored Proof-of-Work CAPTCHA performance record, as returned by
/// `MCDatabase::analytics_fetch`
pub struct PerformanceAnalytics {
/// database row ID of this log entry
pub id: usize,
/// time taken to generate proof (same unit as
/// `CreatePerformanceAnalytics::time`)
pub time: u32,
/// difficulty factor for which the proof was generated
pub difficulty_factor: u32,
/// worker/client type that produced the proof: wasm, javascript, python, etc.
pub worker_type: String,
}
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)]
@ -332,7 +407,6 @@ pub struct Secret {
/// user's secret
pub secret: String,
}
/// Trait to clone MCDatabase
pub trait CloneSPDatabase {
/// clone DB

View file

@ -260,6 +260,60 @@ pub async fn database_works<'a, T: MCDatabase>(
db.record_solve(c.key).await.unwrap();
db.record_confirm(c.key).await.unwrap();
// analytics start
db.analytics_create_psuedo_id_if_not_exists(c.key)
.await
.unwrap();
let psuedo_id = db
.analytics_get_psuedo_id_from_capmaign_id(c.key)
.await
.unwrap();
db.analytics_create_psuedo_id_if_not_exists(c.key)
.await
.unwrap();
assert_eq!(
psuedo_id,
db.analytics_get_psuedo_id_from_capmaign_id(c.key)
.await
.unwrap()
);
assert_eq!(
c.key,
db.analytics_get_capmaign_id_from_psuedo_id(&psuedo_id)
.await
.unwrap()
);
let analytics = CreatePerformanceAnalytics {
time: 0,
difficulty_factor: 0,
worker_type: "wasm".into(),
};
db.analysis_save(c.key, &analytics).await.unwrap();
let limit = 50;
let mut offset = 0;
let a = db.analytics_fetch(c.key, limit, offset).await.unwrap();
assert_eq!(a[0].time, analytics.time);
assert_eq!(a[0].difficulty_factor, analytics.difficulty_factor);
assert_eq!(a[0].worker_type, analytics.worker_type);
offset += 1;
assert!(db
.analytics_fetch(c.key, limit, offset)
.await
.unwrap()
.is_empty());
db.analytics_delete_all_records_for_campaign(c.key)
.await
.unwrap();
assert_eq!(db.analytics_fetch(c.key, 1000, 0).await.unwrap().len(), 0);
assert!(!db.analytics_captcha_is_published(c.key).await.unwrap());
db.analytics_delete_all_records_for_campaign(c.key)
.await
.unwrap();
// analytics end
assert_eq!(db.fetch_solve(p.username, c.key).await.unwrap().len(), 1);
assert_eq!(
db.fetch_config_fetched(p.username, c.key)

View file

@ -13,6 +13,7 @@ async-trait = "0.1.51"
db-core = {path = "../db-core"}
futures = "0.3.15"
sqlx = { version = "0.5.13", features = [ "runtime-actix-rustls", "mysql", "time", "offline" ] }
uuid = { version = "1.4.0", features = ["v4", "serde"] }
[dev-dependencies]
actix-rt = "2"

View file

@ -0,0 +1,13 @@
-- PoW performance analytics: one row per recorded proof-of-work run.
-- Rows belong to a captcha configuration and are removed/updated
-- automatically (CASCADE) with the parent mcaptcha_config row.
CREATE TABLE IF NOT EXISTS mcaptcha_pow_analytics (
ID INT auto_increment,
PRIMARY KEY(ID),
config_id INTEGER NOT NULL,
time INTEGER NOT NULL,
difficulty_factor INTEGER NOT NULL,
worker_type VARCHAR(100) NOT NULL,
CONSTRAINT `fk_mcaptcha_config_id_pow_analytics`
FOREIGN KEY (config_id)
REFERENCES mcaptcha_config (config_id)
ON DELETE CASCADE
ON UPDATE CASCADE
);

View file

@ -0,0 +1,13 @@
-- Maps a captcha/campaign configuration to the public "psuedo ID" under
-- which its analytics are published, hiding the internal config ID.
-- NOTE(review): config_id carries no UNIQUE constraint, so the schema
-- permits several psuedo IDs per campaign — confirm that is intended.
CREATE TABLE IF NOT EXISTS mcaptcha_psuedo_campaign_id (
ID INT auto_increment,
PRIMARY KEY(ID),
psuedo_id varchar(100) NOT NULL UNIQUE,
config_id INT NOT NULL,
CONSTRAINT `fk_mcaptcha_psuedo_campaign_id_config_id`
FOREIGN KEY (config_id)
REFERENCES mcaptcha_config (config_id)
ON DELETE CASCADE
ON UPDATE CASCADE
);

View file

@ -25,6 +25,31 @@
},
"query": "SELECT time FROM mcaptcha_pow_confirmed_stats \n WHERE \n config_id = (\n SELECT config_id FROM mcaptcha_config \n WHERE \n captcha_key = ?\n AND\n user_id = (\n SELECT \n ID FROM mcaptcha_users WHERE name = ?))\n ORDER BY time DESC"
},
"14dc89b2988b221fd24e4f319b1d48f5e6c65c760c30d11c9c29087f09cee23a": {
"describe": {
"columns": [
{
"name": "captcha_key",
"ordinal": 0,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4101
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false
],
"parameters": {
"Right": 1
}
},
"query": "SELECT\n captcha_key\n FROM\n mcaptcha_config\n WHERE\n config_id = (\n SELECT\n config_id\n FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n psuedo_id = ?\n );"
},
"22e697114c3ed5b0156cdceab11a398f1ef3a804f482e1cd948bc615ef95fc92": {
"describe": {
"columns": [],
@ -154,6 +179,31 @@
},
"query": "INSERT INTO mcaptcha_pow_fetched_stats \n (config_id, time) VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?)"
},
"5ad1ef722a961183228d851813b9f50284520bf8cc8118c765b72c108daaf6fb": {
"describe": {
"columns": [
{
"name": "psuedo_id",
"ordinal": 0,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4101
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false
],
"parameters": {
"Right": 1
}
},
"query": "SELECT psuedo_id FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n config_id = (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?));\n "
},
"5d5a106981345e9f62bc2239c00cdc683d3aaaa820d63da300dc51e3f6f363d3": {
"describe": {
"columns": [],
@ -164,6 +214,16 @@
},
"query": "INSERT INTO mcaptcha_users \n (name , password, secret) VALUES (?, ?, ?)"
},
"6094468b7fa20043b0da90e366b7f1fa29a8c748e163b6712725440b25ae9361": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "\n DELETE FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n ) "
},
"66ec7df10484f8e0206f3c97afc9136021589556c38dbbed341d6574487f79f2": {
"describe": {
"columns": [
@ -406,6 +466,80 @@
},
"query": "UPDATE mcaptcha_users set email = ?\n WHERE name = ?"
},
"9e45969a0f79eab8caba41b0d91e5e3b85a1a68a49136f89fc90793c38f00041": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 2
}
},
"query": "\n INSERT INTO\n mcaptcha_psuedo_campaign_id (config_id, psuedo_id)\n VALUES (\n (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)),\n ?\n );"
},
"9f10afb0f242f11c58389803c5e85e244cc59102b8929a21e3fcaa852d57a52c": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": {
"char_set": 63,
"flags": {
"bits": 515
},
"max_size": 11,
"type": "Long"
}
},
{
"name": "time",
"ordinal": 1,
"type_info": {
"char_set": 63,
"flags": {
"bits": 4097
},
"max_size": 11,
"type": "Long"
}
},
{
"name": "difficulty_factor",
"ordinal": 2,
"type_info": {
"char_set": 63,
"flags": {
"bits": 4097
},
"max_size": 11,
"type": "Long"
}
},
{
"name": "worker_type",
"ordinal": 3,
"type_info": {
"char_set": 224,
"flags": {
"bits": 4097
},
"max_size": 400,
"type": "VarString"
}
}
],
"nullable": [
false,
false,
false,
false
],
"parameters": {
"Right": 3
}
},
"query": "SELECT\n id, time, difficulty_factor, worker_type\n FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n ) \n ORDER BY ID\n LIMIT ? OFFSET ?"
},
"a89c066db044cddfdebee6a0fd0d80a5a26dcb7ecc00a9899f5634b72ea0a952": {
"describe": {
"columns": [
@ -759,6 +893,16 @@
},
"query": "INSERT INTO mcaptcha_pow_solved_stats \n (config_id, time) VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?)"
},
"e4d9bf156a368dcee1433dd5ced9f1991aa15f84e0ade916433aada40f68f0aa": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 1
}
},
"query": "\n DELETE FROM\n mcaptcha_psuedo_campaign_id\n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n );"
},
"e6569a6064d0e07abea4c0bd4686cdfdaac64f0109ac40efaed06a744a2eaf5e": {
"describe": {
"columns": [
@ -873,6 +1017,16 @@
},
"query": "SELECT name, password FROM mcaptcha_users WHERE email = ?"
},
"f987c4568ab28271d87af47f473b18cf41130a483333e81d5f50199758cbb98b": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Right": 4
}
},
"query": "INSERT INTO mcaptcha_pow_analytics \n (config_id, time, difficulty_factor, worker_type)\n VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?, ?, ?)"
},
"fc717ff0827ccfaa1cc61a71cc7f71c348ebb03d35895c54b011c03121ad2385": {
"describe": {
"columns": [],

View file

@ -22,6 +22,7 @@ use sqlx::mysql::MySqlPoolOptions;
use sqlx::types::time::OffsetDateTime;
use sqlx::ConnectOptions;
use sqlx::MySqlPool;
use uuid::Uuid;
pub mod errors;
#[cfg(test)]
@ -895,6 +896,191 @@ impl MCDatabase for Database {
Ok(Date::dates_to_unix(records))
}
/// Record a single PoW performance measurement (time, difficulty factor,
/// worker type) against the captcha whose `captcha_key` is `captcha_id`.
/// Errors with `DBError::CaptchaNotFound` when the key does not resolve.
async fn analysis_save(
&self,
captcha_id: &str,
d: &CreatePerformanceAnalytics,
) -> DBResult<()> {
// u32 -> i32 casts: the MariaDB INTEGER columns are signed; values above
// i32::MAX would wrap — assumed not to occur in practice (TODO confirm).
let _ = sqlx::query!(
"INSERT INTO mcaptcha_pow_analytics
(config_id, time, difficulty_factor, worker_type)
VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?, ?, ?)",
captcha_id,
d.time as i32,
d.difficulty_factor as i32,
&d.worker_type,
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(())
}
/// Fetch stored PoW analytics for a captcha, oldest first (ORDER BY ID),
/// paginated with `limit`/`offset`. Errors with `DBError::CaptchaNotFound`
/// when the captcha key does not resolve.
async fn analytics_fetch(
&self,
captcha_id: &str,
limit: usize,
offset: usize,
) -> DBResult<Vec<PerformanceAnalytics>> {
// Local row type mirroring the signed column types MariaDB returns;
// converted to the unsigned public `PerformanceAnalytics` below.
struct P {
id: i32,
time: i32,
difficulty_factor: i32,
worker_type: String,
}
impl From<P> for PerformanceAnalytics {
fn from(v: P) -> Self {
Self {
id: v.id as usize,
time: v.time as u32,
difficulty_factor: v.difficulty_factor as u32,
worker_type: v.worker_type,
}
}
}
let mut c = sqlx::query_as!(
P,
"SELECT
id, time, difficulty_factor, worker_type
FROM
mcaptcha_pow_analytics
WHERE
config_id = (
SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?
)
ORDER BY ID
LIMIT ? OFFSET ?",
&captcha_id,
limit as i64,
offset as i64,
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
// Drain the raw rows, converting each into the public type.
let mut res = Vec::with_capacity(c.len());
for i in c.drain(0..) {
res.push(i.into())
}
Ok(res)
}
/// Create (publish) a psuedo ID for the campaign identified by `captcha_id`
/// (its `captcha_key`). Safe to call repeatedly: when a psuedo ID already
/// exists for the campaign the call is a no-op, as the `_if_not_exists`
/// name promises and the shared db-core test suite asserts.
async fn analytics_create_psuedo_id_if_not_exists(
    &self,
    captcha_id: &str,
) -> DBResult<()> {
    // Honor the "if not exists" contract: without this guard every call
    // inserts another row with a fresh UUID (config_id is not UNIQUE in the
    // maria schema), making later psuedo-ID lookups ambiguous.
    // NOTE(review): check-then-insert is racy under concurrent callers; a
    // UNIQUE constraint on config_id would close that window for good.
    match self
        .analytics_get_psuedo_id_from_capmaign_id(captcha_id)
        .await
    {
        Ok(_) => return Ok(()),
        Err(DBError::CaptchaNotFound) => (),
        Err(e) => return Err(e),
    }

    let id = Uuid::new_v4();
    sqlx::query!(
        "
    INSERT INTO
        mcaptcha_psuedo_campaign_id (config_id, psuedo_id)
    VALUES (
        (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)),
        ?
    );",
        captcha_id,
        &id.to_string(),
    )
    .execute(&self.pool)
    .await
    .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
    Ok(())
}
/// Look up the published psuedo ID for a campaign (by its `captcha_key`).
/// Errors with `DBError::CaptchaNotFound` when the campaign has no psuedo
/// ID, i.e. its analytics are not published.
async fn analytics_get_psuedo_id_from_capmaign_id(
&self,
captcha_id: &str,
) -> DBResult<String> {
// Ad-hoc row type for `query_as!`; only the psuedo_id column is selected.
struct ID {
psuedo_id: String,
}
let res = sqlx::query_as!(
ID,
"SELECT psuedo_id FROM
mcaptcha_psuedo_campaign_id
WHERE
config_id = (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?));
",
captcha_id
).fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.psuedo_id)
}
/// Reverse lookup: resolve a psuedo ID back to the campaign's
/// `captcha_key`. Errors with `DBError::CaptchaNotFound` for unknown
/// psuedo IDs.
async fn analytics_get_capmaign_id_from_psuedo_id(
&self,
psuedo_id: &str,
) -> DBResult<String> {
// Ad-hoc row type for `query_as!`; only captcha_key is selected.
struct ID {
captcha_key: String,
}
let res = sqlx::query_as!(
ID,
"SELECT
captcha_key
FROM
mcaptcha_config
WHERE
config_id = (
SELECT
config_id
FROM
mcaptcha_psuedo_campaign_id
WHERE
psuedo_id = ?
);",
psuedo_id
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.captcha_key)
}
/// Delete a campaign's psuedo-ID mapping and all of its analytics rows
/// (unpublish + purge). Identified by the campaign's `captcha_key`.
async fn analytics_delete_all_records_for_campaign(
&self,
campaign_id: &str,
) -> DBResult<()> {
// NOTE(review): both DELETE results are discarded (`let _ = ...`), so the
// method returns Ok(()) even if a query fails — confirm this best-effort
// behaviour is intended rather than error propagation.
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_psuedo_campaign_id
WHERE config_id = (
SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?
);",
campaign_id
)
.execute(&self.pool)
.await;
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_pow_analytics
WHERE
config_id = (
SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?
) ",
campaign_id
)
.execute(&self.pool)
.await;
Ok(())
}
}
#[derive(Clone)]

View file

@ -13,6 +13,7 @@ async-trait = "0.1.51"
db-core = {path = "../db-core"}
futures = "0.3.15"
sqlx = { version = "0.5.13", features = [ "runtime-actix-rustls", "postgres", "time", "offline" ] }
uuid = { version = "1.4.0", features = ["v4", "serde"] }
[dev-dependencies]
actix-rt = "2"

View file

@ -0,0 +1,7 @@
-- PoW performance analytics: one row per recorded proof-of-work run;
-- rows are removed automatically (CASCADE) with the parent
-- mcaptcha_config row.
CREATE TABLE IF NOT EXISTS mcaptcha_pow_analytics (
config_id INTEGER references mcaptcha_config(config_id) ON DELETE CASCADE,
time INTEGER NOT NULL,
difficulty_factor INTEGER NOT NULL,
worker_type VARCHAR(100) NOT NULL,
ID SERIAL PRIMARY KEY NOT NULL
);

View file

@ -0,0 +1,5 @@
-- Maps a captcha/campaign configuration to the public "psuedo ID" under
-- which its analytics are published, hiding the internal config ID.
-- NOTE(review): config_id carries no UNIQUE constraint, so multiple
-- psuedo IDs per campaign are representable — confirm intended.
CREATE TABLE IF NOT EXISTS mcaptcha_psuedo_campaign_id (
id SERIAL PRIMARY KEY NOT NULL,
config_id INTEGER NOT NULL references mcaptcha_config(config_id) ON DELETE CASCADE,
psuedo_id varchar(100) NOT NULL UNIQUE
);

View file

@ -1,5 +1,45 @@
{
"db": "PostgreSQL",
"017576128f1c63aee062799a33f872457fe19f5d6429d0af312dc00c244b31cb": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int4"
},
{
"name": "time",
"ordinal": 1,
"type_info": "Int4"
},
{
"name": "difficulty_factor",
"ordinal": 2,
"type_info": "Int4"
},
{
"name": "worker_type",
"ordinal": 3,
"type_info": "Varchar"
}
],
"nullable": [
false,
false,
false,
false
],
"parameters": {
"Left": [
"Text",
"Int8",
"Int8"
]
}
},
"query": "SELECT id, time, difficulty_factor, worker_type FROM mcaptcha_pow_analytics\n WHERE \n config_id = (\n SELECT \n config_id FROM mcaptcha_config \n WHERE \n key = $1\n )\n ORDER BY ID\n OFFSET $2 LIMIT $3\n "
},
"02deb524bb12632af9b7883975f75fdc30d6775d836aff647add1dffd1a4bc00": {
"describe": {
"columns": [
@ -132,6 +172,26 @@
},
"query": "UPDATE mcaptcha_users set name = $1\n WHERE name = $2"
},
"21cdf28d8962389d22c8ddefdad82780f5316737e3d833623512aa12a54a026a": {
"describe": {
"columns": [
{
"name": "key",
"ordinal": 0,
"type_info": "Varchar"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT\n key\n FROM\n mcaptcha_config\n WHERE\n config_id = (\n SELECT\n config_id\n FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n psuedo_id = $1\n );"
},
"2b319a202bb983d5f28979d1e371f399125da1122fbda36a5a55b75b9c743451": {
"describe": {
"columns": [],
@ -180,6 +240,18 @@
},
"query": "SELECT email FROM mcaptcha_users WHERE name = $1"
},
"30d8945806b4c68b6da800395f61c1e480839093bfcda9c693bf1972a65c7d79": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "\n DELETE FROM\n mcaptcha_psuedo_campaign_id\n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = ($1)\n );"
},
"3b1c8128fc48b16d8e8ea6957dd4fbc0eb19ae64748fd7824e9f5e1901dd1726": {
"describe": {
"columns": [],
@ -406,6 +478,26 @@
},
"query": "INSERT INTO mcaptcha_users \n (name , password, secret) VALUES ($1, $2, $3)"
},
"839dfdfc3543b12128cb2b44bf356cd81f3da380963e5684ec3624a0ea4f9547": {
"describe": {
"columns": [
{
"name": "psuedo_id",
"ordinal": 0,
"type_info": "Varchar"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "SELECT psuedo_id FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n config_id = (SELECT config_id FROM mcaptcha_config WHERE key = ($1));\n "
},
"84484cb6892db29121816bc5bff5702b9e857e20aa14e79d080d78ae7593153b": {
"describe": {
"columns": [
@ -493,6 +585,33 @@
},
"query": "SELECT EXISTS (SELECT 1 from mcaptcha_users WHERE name = $1)"
},
"af47990880a92c63d1cf5192203899c72621479dc6bb47859fb4498264b78033": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Int4",
"Int4",
"Varchar"
]
}
},
"query": "INSERT INTO mcaptcha_pow_analytics \n (config_id, time, difficulty_factor, worker_type)\n VALUES ((SELECT config_id FROM mcaptcha_config WHERE key = $1), $2, $3, $4)"
},
"b67da576ff30a1bc8b1c0a79eff07f0622bd9ea035d3de15b91f5e1e8a5fda9b": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text"
]
}
},
"query": "\n DELETE FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = $1\n )\n "
},
"b97d810814fbeb2df19f47bcfa381bc6fb7ac6832d040b377cf4fca2ca896cfb": {
"describe": {
"columns": [],
@ -545,6 +664,19 @@
},
"query": "SELECT name, password FROM mcaptcha_users WHERE email = ($1)"
},
"c1bb8e02d1f9dc28322309d055de3c40ed4e1a1b9453a7e5a93a70e5186d762d": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Text",
"Varchar"
]
}
},
"query": "\n INSERT INTO\n mcaptcha_psuedo_campaign_id (config_id, psuedo_id)\n VALUES (\n (SELECT config_id FROM mcaptcha_config WHERE key = ($1)),\n $2\n );"
},
"c2e167e56242de7e0a835e25004b15ca8340545fa0ca7ac8f3293157d2d03d98": {
"describe": {
"columns": [

View file

@ -22,6 +22,7 @@ use sqlx::postgres::PgPoolOptions;
use sqlx::types::time::OffsetDateTime;
use sqlx::ConnectOptions;
use sqlx::PgPool;
use uuid::Uuid;
pub mod errors;
#[cfg(test)]
@ -901,6 +902,194 @@ impl MCDatabase for Database {
Ok(Date::dates_to_unix(records))
}
/// Record a single PoW performance measurement (time, difficulty factor,
/// worker type) against the captcha whose `key` is `captcha_id`.
/// Errors with `DBError::CaptchaNotFound` when the key does not resolve.
async fn analysis_save(
&self,
captcha_id: &str,
d: &CreatePerformanceAnalytics,
) -> DBResult<()> {
// u32 -> i32 casts: the Postgres INTEGER columns are signed; values above
// i32::MAX would wrap — assumed not to occur in practice (TODO confirm).
let _ = sqlx::query!(
"INSERT INTO mcaptcha_pow_analytics
(config_id, time, difficulty_factor, worker_type)
VALUES ((SELECT config_id FROM mcaptcha_config WHERE key = $1), $2, $3, $4)",
captcha_id,
d.time as i32,
d.difficulty_factor as i32,
&d.worker_type,
)
.execute(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(())
}
/// Fetch stored PoW analytics for a captcha, oldest first (ORDER BY ID),
/// paginated with `limit`/`offset`. Errors with `DBError::CaptchaNotFound`
/// when the captcha key does not resolve.
async fn analytics_fetch(
&self,
captcha_id: &str,
limit: usize,
offset: usize,
) -> DBResult<Vec<PerformanceAnalytics>> {
// Local row type mirroring the signed column types Postgres returns;
// converted to the unsigned public `PerformanceAnalytics` below.
struct P {
id: i32,
time: i32,
difficulty_factor: i32,
worker_type: String,
}
impl From<P> for PerformanceAnalytics {
fn from(v: P) -> Self {
Self {
time: v.time as u32,
difficulty_factor: v.difficulty_factor as u32,
worker_type: v.worker_type,
id: v.id as usize,
}
}
}
let mut c = sqlx::query_as!(
P,
"SELECT id, time, difficulty_factor, worker_type FROM mcaptcha_pow_analytics
WHERE
config_id = (
SELECT
config_id FROM mcaptcha_config
WHERE
key = $1
)
ORDER BY ID
OFFSET $2 LIMIT $3
",
&captcha_id,
offset as i32,
limit as i32
)
.fetch_all(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
// Drain the raw rows, converting each into the public type.
let mut res = Vec::with_capacity(c.len());
for i in c.drain(0..) {
res.push(i.into())
}
Ok(res)
}
/// Create (publish) a psuedo ID for the campaign identified by `captcha_id`
/// (its `key`). Safe to call repeatedly: when a psuedo ID already exists
/// for the campaign the call is a no-op, as the `_if_not_exists` name
/// promises and the shared db-core test suite asserts.
async fn analytics_create_psuedo_id_if_not_exists(
    &self,
    captcha_id: &str,
) -> DBResult<()> {
    // Honor the "if not exists" contract: without this guard every call
    // inserts another row with a fresh UUID (config_id is not UNIQUE in the
    // postgres schema), making later psuedo-ID lookups ambiguous.
    // NOTE(review): check-then-insert is racy under concurrent callers; a
    // UNIQUE constraint on config_id would close that window for good.
    match self
        .analytics_get_psuedo_id_from_capmaign_id(captcha_id)
        .await
    {
        Ok(_) => return Ok(()),
        Err(DBError::CaptchaNotFound) => (),
        Err(e) => return Err(e),
    }

    let id = Uuid::new_v4();
    sqlx::query!(
        "
    INSERT INTO
        mcaptcha_psuedo_campaign_id (config_id, psuedo_id)
    VALUES (
        (SELECT config_id FROM mcaptcha_config WHERE key = ($1)),
        $2
    );",
        captcha_id,
        &id.to_string(),
    )
    .execute(&self.pool)
    .await
    .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
    Ok(())
}
/// Look up the published psuedo ID for a campaign (by its `key`).
/// Errors with `DBError::CaptchaNotFound` when the campaign has no psuedo
/// ID, i.e. its analytics are not published.
async fn analytics_get_psuedo_id_from_capmaign_id(
&self,
captcha_id: &str,
) -> DBResult<String> {
// Ad-hoc row type for `query_as!`; only the psuedo_id column is selected.
struct ID {
psuedo_id: String,
}
let res = sqlx::query_as!(
ID,
"SELECT psuedo_id FROM
mcaptcha_psuedo_campaign_id
WHERE
config_id = (SELECT config_id FROM mcaptcha_config WHERE key = ($1));
",
captcha_id
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.psuedo_id)
}
/// Reverse lookup: resolve a psuedo ID back to the campaign's `key`.
/// Errors with `DBError::CaptchaNotFound` for unknown psuedo IDs.
async fn analytics_get_capmaign_id_from_psuedo_id(
&self,
psuedo_id: &str,
) -> DBResult<String> {
// Ad-hoc row type for `query_as!`; only the key column is selected.
struct ID {
key: String,
}
let res = sqlx::query_as!(
ID,
"SELECT
key
FROM
mcaptcha_config
WHERE
config_id = (
SELECT
config_id
FROM
mcaptcha_psuedo_campaign_id
WHERE
psuedo_id = $1
);",
psuedo_id
)
.fetch_one(&self.pool)
.await
.map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?;
Ok(res.key)
}
/// Delete a campaign's psuedo-ID mapping and all of its analytics rows
/// (unpublish + purge). Identified by the campaign's `key`.
async fn analytics_delete_all_records_for_campaign(
&self,
campaign_id: &str,
) -> DBResult<()> {
// NOTE(review): both DELETE results are discarded (`let _ = ...`), so the
// method returns Ok(()) even if a query fails — confirm this best-effort
// behaviour is intended rather than error propagation.
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_psuedo_campaign_id
WHERE config_id = (
SELECT config_id FROM mcaptcha_config WHERE key = ($1)
);",
campaign_id
)
.execute(&self.pool)
.await;
let _ = sqlx::query!(
"
DELETE FROM
mcaptcha_pow_analytics
WHERE
config_id = (
SELECT config_id FROM mcaptcha_config WHERE key = $1
)
",
campaign_id
)
.execute(&self.pool)
.await;
Ok(())
}
}
#[derive(Clone)]

View file

@ -31,6 +31,7 @@ pub struct CreateCaptcha {
pub levels: Vec<Level>,
pub duration: u32,
pub description: String,
pub publish_benchmarks: bool,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
@ -52,6 +53,11 @@ pub async fn create(
) -> ServiceResult<impl Responder> {
let username = id.identity().unwrap();
let mcaptcha_config = runner::create(&payload, &data, &username).await?;
if payload.publish_benchmarks {
data.db
.analytics_create_psuedo_id_if_not_exists(&mcaptcha_config.key)
.await?;
}
Ok(HttpResponse::Ok().json(mcaptcha_config))
}

View file

@ -60,6 +60,9 @@ pub struct TrafficPatternRequest {
pub broke_my_site_traffic: Option<u32>,
/// Captcha description
pub description: String,
/// publish benchmarks
pub publish_benchmarks: bool,
}
impl From<&TrafficPatternRequest> for TrafficPattern {
@ -127,12 +130,14 @@ async fn create(
levels,
duration: data.settings.captcha.default_difficulty_strategy.duration,
description: payload.description,
publish_benchmarks: payload.publish_benchmarks,
};
let mcaptcha_config = create_runner(&msg, &data, &username).await?;
data.db
.add_traffic_pattern(&username, &mcaptcha_config.key, &pattern)
.await?;
Ok(HttpResponse::Ok().json(mcaptcha_config))
}
@ -162,6 +167,7 @@ async fn update(
duration: data.settings.captcha.default_difficulty_strategy.duration,
description: payload.pattern.description,
key: payload.key,
publish_benchmarks: payload.pattern.publish_benchmarks,
};
update_captcha_runner(&msg, &data, &username).await?;
@ -292,6 +298,7 @@ pub mod tests {
peak_sustainable_traffic: 1_000_000,
broke_my_site_traffic: Some(10_000_000),
description: NAME.into(),
publish_benchmarks: false,
};
let default_levels = calculate(
@ -323,6 +330,11 @@ pub mod tests {
assert_eq!(get_level_resp.status(), StatusCode::OK);
let res_levels: Vec<Level> = test::read_body_json(get_level_resp).await;
assert_eq!(res_levels, default_levels);
assert!(!data
.db
.analytics_captcha_is_published(&token_key.key)
.await
.unwrap());
// END create_easy
// START update_easy
@ -331,6 +343,7 @@ pub mod tests {
peak_sustainable_traffic: 10_000,
broke_my_site_traffic: Some(1_000_000),
description: NAME.into(),
publish_benchmarks: true,
};
let updated_default_values = calculate(
@ -352,6 +365,11 @@ pub mod tests {
)
.await;
assert_eq!(update_token_resp.status(), StatusCode::OK);
assert!(data
.db
.analytics_captcha_is_published(&token_key.key)
.await
.unwrap());
let get_level_resp = test::call_service(
&app,
@ -394,5 +412,52 @@ pub mod tests {
));
assert!(body.contains(&payload.pattern.avg_traffic.to_string()));
assert!(body.contains(&payload.pattern.peak_sustainable_traffic.to_string()));
// START update_easy to delete published results
let mut payload2 = TrafficPatternRequest {
avg_traffic: 100_000,
peak_sustainable_traffic: 1_000_000,
broke_my_site_traffic: Some(10_000_000),
description: NAME.into(),
publish_benchmarks: true,
};
let add_token_resp = test::call_service(
&app,
post_request!(&payload2, ROUTES.captcha.easy.create)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(add_token_resp.status(), StatusCode::OK);
assert!(data
.db
.analytics_captcha_is_published(&token_key.key)
.await
.unwrap());
let token_key2: MCaptchaDetails = test::read_body_json(add_token_resp).await;
payload2.publish_benchmarks = false;
let payload = UpdateTrafficPattern {
pattern: payload2,
key: token_key2.key.clone(),
};
let update_token_resp = test::call_service(
&app,
post_request!(&payload, ROUTES.captcha.easy.update)
.cookie(cookies.clone())
.to_request(),
)
.await;
assert_eq!(update_token_resp.status(), StatusCode::OK);
assert!(!data
.db
.analytics_captcha_is_published(&token_key2.key)
.await
.unwrap());
}
}

View file

@ -82,6 +82,7 @@ pub async fn level_routes_work(data: ArcData) {
levels: levels.clone(),
description: add_level.description,
duration: add_level.duration,
publish_benchmarks: true,
};
let add_token_resp = test::call_service(

View file

@ -76,6 +76,7 @@ pub struct UpdateCaptcha {
pub duration: u32,
pub description: String,
pub key: String,
pub publish_benchmarks: bool,
}
#[my_codegen::post(
@ -139,6 +140,16 @@ pub mod runner {
e
);
}
if payload.publish_benchmarks {
data.db
.analytics_create_psuedo_id_if_not_exists(&payload.key)
.await?;
} else {
data.db
.analytics_delete_all_records_for_campaign(&payload.key)
.await?;
}
Ok(())
}
}

View file

@ -109,8 +109,8 @@ pub async fn init_mcaptcha(data: &AppData, key: &str) -> ServiceResult<()> {
for level in levels.iter() {
let level = LevelBuilder::default()
.visitor_threshold(level.visitor_threshold as u32)
.difficulty_factor(level.difficulty_factor as u32)
.visitor_threshold(level.visitor_threshold)
.difficulty_factor(level.difficulty_factor)
.unwrap()
.build()
.unwrap();
@ -250,6 +250,7 @@ pub mod tests {
levels: levels.into(),
duration: 30,
description: "dummy".into(),
publish_benchmarks: true,
};
// 1. add level
@ -267,11 +268,11 @@ pub mod tests {
key: token_key.key.clone(),
};
let url = V1_API_ROUTES.pow.get_config;
let _url = V1_API_ROUTES.pow.get_config;
let mut prev = 0;
for (count, l) in levels.iter().enumerate() {
for l in prev..l.visitor_threshold * 2 {
let get_config_resp = test::call_service(
for _l in prev..l.visitor_threshold * 2 {
let _get_config_resp = test::call_service(
&app,
post_request!(&get_config_payload, V1_API_ROUTES.pow.get_config)
.to_request(),

View file

@ -32,6 +32,27 @@ pub struct ValidationToken {
pub token: String,
}
/// Proof-of-work solution as submitted over the HTTP API: the core PoW
/// solution fields plus optional, client-reported benchmark metadata.
/// Converted into the core [`Work`] type (dropping the metadata) before
/// verification.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ApiWork {
pub string: String,
pub result: String,
pub nonce: u64,
pub key: String,
/// Client-measured proof-generation time; ms per the frontend's
/// `performance.now()` delta — TODO confirm units are not clamped elsewhere.
pub time: Option<u32>,
/// Proof generator reported by the client ("wasm" or "js" in the bundled
/// frontend). `Some` together with `time` opts the solve into analytics.
pub worker_type: Option<String>,
}
impl From<ApiWork> for Work {
fn from(value: ApiWork) -> Self {
Self {
string: value.string,
nonce: value.nonce,
result: value.result,
key: value.key,
}
}
}
// API keys are mcaptcha actor names
/// route handler that verifies PoW and issues a solution token
@ -39,7 +60,7 @@ pub struct ValidationToken {
#[my_codegen::post(path = "V1_API_ROUTES.pow.verify_pow()")]
pub async fn verify_pow(
req: HttpRequest,
payload: web::Json<Work>,
payload: web::Json<ApiWork>,
data: AppData,
) -> ServiceResult<impl Responder> {
#[cfg(not(test))]
@ -52,8 +73,19 @@ pub async fn verify_pow(
let ip = "127.0.1.1".into();
let key = payload.key.clone();
let res = data.captcha.verify_pow(payload.into_inner(), ip).await?;
let payload = payload.into_inner();
let worker_type = payload.worker_type.clone();
let time = payload.time;
let (res, difficulty_factor) = data.captcha.verify_pow(payload.into(), ip).await?;
data.stats.record_solve(&data, &key).await?;
if time.is_some() && worker_type.is_some() {
let analytics = db_core::CreatePerformanceAnalytics {
difficulty_factor,
time: time.unwrap(),
worker_type: worker_type.unwrap(),
};
data.db.analysis_save(&key, &analytics).await?;
}
let payload = ValidationToken { token: res };
Ok(HttpResponse::Ok().json(payload))
}
@ -81,6 +113,81 @@ pub mod tests {
verify_pow_works(data).await;
}
/// Analytics PoW verification flow against the Postgres backend.
#[actix_rt::test]
async fn verify_analytics_pow_works_pg() {
    verify_analytics_pow_works(crate::tests::pg::get_data().await).await;
}
/// Analytics PoW verification flow against the MariaDB backend.
#[actix_rt::test]
async fn verify_analytics_pow_works_maria() {
    verify_analytics_pow_works(crate::tests::maria::get_data().await).await;
}
/// End-to-end check that a PoW submission carrying benchmark metadata
/// (`time` + `worker_type`) is verified AND recorded in the analytics store.
/// Shared by the Postgres and MariaDB test entry points.
pub async fn verify_analytics_pow_works(data: ArcData) {
const NAME: &str = "powanalyticsuser";
const PASSWORD: &str = "testingpas";
const EMAIL: &str = "powanalyticsuser@a.com";

let data = &data;
// start from a clean slate in case a previous run left this user behind
delete_user(data, NAME).await;
register_and_signin(data, NAME, EMAIL, PASSWORD).await;
// add_levels_util creates a captcha for NAME; token_key identifies it
let (_, _signin_resp, token_key) = add_levels_util(data, NAME, PASSWORD).await;
let app = get_app!(data).await;

let get_config_payload = GetConfigPayload {
key: token_key.key.clone(),
};

// update and check changes

// fetch a PoW challenge (salt, string, difficulty) for this captcha
let get_config_resp = test::call_service(
&app,
post_request!(&get_config_payload, V1_API_ROUTES.pow.get_config)
.to_request(),
)
.await;
assert_eq!(get_config_resp.status(), StatusCode::OK);
let config: PoWConfig = test::read_body_json(get_config_resp).await;

// solve the challenge locally with pow_sha256 so the submission is valid
let pow = pow_sha256::ConfigBuilder::default()
.salt(config.salt)
.build()
.unwrap();
let work = pow
.prove_work(&config.string.clone(), config.difficulty_factor)
.unwrap();
// attach benchmark metadata — this is what should trigger analytics recording
let work = ApiWork {
string: config.string.clone(),
result: work.result,
nonce: work.nonce,
key: token_key.key.clone(),
time: Some(100),
worker_type: Some("wasm".into()),
};

let pow_verify_resp = test::call_service(
&app,
post_request!(&work, V1_API_ROUTES.pow.verify_pow).to_request(),
)
.await;
assert_eq!(pow_verify_resp.status(), StatusCode::OK);

// exactly one analytics record should exist, echoing the submitted metadata
let limit = 50;
let offset = 0;
let mut analytics = data
.db
.analytics_fetch(&token_key.key, limit, offset)
.await
.unwrap();
assert_eq!(analytics.len(), 1);
let a = analytics.pop().unwrap();
assert_eq!(a.time, work.time.unwrap());
assert_eq!(a.worker_type, work.worker_type.unwrap());
}
pub async fn verify_pow_works(data: ArcData) {
const NAME: &str = "powverifyusr";
const PASSWORD: &str = "testingpas";
@ -129,6 +236,12 @@ pub mod tests {
)
.await;
assert_eq!(pow_verify_resp.status(), StatusCode::OK);
assert!(data
.db
.analytics_fetch(&token_key.key, 50, 0)
.await
.unwrap()
.is_empty());
let string_not_found = test::call_service(
&app,

View file

@ -83,7 +83,11 @@ impl SystemGroup {
enum_system_wrapper!(get_pow, String, CaptchaResult<Option<PoWConfig>>);
// utility function to verify [Work]
pub async fn verify_pow(&self, msg: Work, ip: String) -> CaptchaResult<String> {
pub async fn verify_pow(
&self,
msg: Work,
ip: String,
) -> CaptchaResult<(String, u32)> {
match self {
Self::Embedded(val) => val.verify_pow(msg, ip).await,
Self::Redis(val) => val.verify_pow(msg, ip).await,
@ -203,9 +207,9 @@ impl Data {
};
let stats: Box<dyn Stats> = if s.captcha.enable_stats {
Box::new(Real::default())
Box::<Real>::default()
} else {
Box::new(Dummy::default())
Box::<Dummy>::default()
};
let data = Data {

View file

@ -132,7 +132,7 @@ mod tests {
let duration = Duration::from_secs(DURATION);
// register works
let _ = DemoUser::register_demo_user(&data).await.unwrap();
DemoUser::register_demo_user(&data).await.unwrap();
let payload = AccountCheckPayload {
val: DEMO_USER.into(),
};

View file

@ -35,15 +35,22 @@ struct AdvanceEditPage {
name: String,
key: String,
levels: Vec<Level>,
publish_benchmarks: bool,
}
impl AdvanceEditPage {
fn new(config: Captcha, levels: Vec<Level>, key: String) -> Self {
fn new(
config: Captcha,
levels: Vec<Level>,
key: String,
publish_benchmarks: bool,
) -> Self {
AdvanceEditPage {
duration: config.duration as u32,
name: config.description,
levels,
key,
publish_benchmarks,
}
}
}
@ -63,8 +70,9 @@ pub async fn advance(
let config = data.db.get_captcha_config(&username, &key).await?;
let levels = data.db.get_captcha_levels(Some(&username), &key).await?;
let publish_benchmarks = data.db.analytics_captcha_is_published(&key).await?;
let body = AdvanceEditPage::new(config, levels, key)
let body = AdvanceEditPage::new(config, levels, key, publish_benchmarks)
.render_once()
.unwrap();
Ok(HttpResponse::Ok()
@ -106,11 +114,14 @@ pub async fn easy(
match data.db.get_traffic_pattern(&username, &key).await {
Ok(c) => {
let config = data.db.get_captcha_config(&username, &key).await?;
let publish_benchmarks =
data.db.analytics_captcha_is_published(&key).await?;
let pattern = TrafficPatternRequest {
peak_sustainable_traffic: c.peak_sustainable_traffic as u32,
avg_traffic: c.avg_traffic as u32,
broke_my_site_traffic: c.broke_my_site_traffic.map(|n| n as u32),
peak_sustainable_traffic: c.peak_sustainable_traffic,
avg_traffic: c.avg_traffic,
broke_my_site_traffic: c.broke_my_site_traffic.map(|n| n),
description: config.description,
publish_benchmarks,
};
let page = EasyEditPage::new(key, pattern).render_once().unwrap();

View file

@ -36,6 +36,7 @@ struct IndexPage {
key: String,
levels: Vec<Level>,
stats: CaptchaStats,
publish_benchmarks: bool,
}
impl IndexPage {
@ -44,6 +45,7 @@ impl IndexPage {
config: Captcha,
levels: Vec<Level>,
key: String,
publish_benchmarks: bool,
) -> Self {
IndexPage {
duration: config.duration as u32,
@ -51,6 +53,7 @@ impl IndexPage {
levels,
key,
stats,
publish_benchmarks,
}
}
}
@ -70,8 +73,9 @@ pub async fn view_sitekey(
let config = data.db.get_captcha_config(&username, &key).await?;
let levels = data.db.get_captcha_levels(Some(&username), &key).await?;
let stats = data.stats.fetch(&data, &username, &key).await?;
let publish_benchmarks = data.db.analytics_captcha_is_published(&key).await?;
let body = IndexPage::new(stats, config, levels, key)
let body = IndexPage::new(stats, config, levels, key, publish_benchmarks)
.render_once()
.unwrap();
Ok(HttpResponse::Ok()

View file

@ -19,7 +19,7 @@ use std::{env, fs};
use config::{Config, ConfigError, Environment, File};
use derive_more::Display;
use log::{debug, warn};
use serde::{Deserialize, Serialize};
use url::Url;
@ -191,7 +191,7 @@ impl Settings {
.unwrap();
log::info!("Overriding [database].url and [database].database_type with environment variable");
}
Err(e) => {
Err(_e) => {
set_database_url(&mut s);
}
}

View file

@ -52,8 +52,8 @@ pub mod pg {
settings.captcha.runners = Some(1);
settings.database.url = url.clone();
settings.database.database_type = DBType::Postgres;
let data = Data::new(&settings).await;
data
Data::new(&settings).await
}
}
pub mod maria {
@ -71,8 +71,8 @@ pub mod maria {
settings.captcha.runners = Some(1);
settings.database.url = url.clone();
settings.database.database_type = DBType::Maria;
let data = Data::new(&settings).await;
data
Data::new(&settings).await
}
}
//pub async fn get_data() -> ArcData {
@ -118,7 +118,7 @@ macro_rules! get_app {
.wrap(actix_middleware::NormalizePath::new(
actix_middleware::TrailingSlash::Trim,
))
.configure(crate::routes::services),
.configure($crate::routes::services),
)
};
($data:expr) => {
@ -262,5 +262,6 @@ pub fn get_level_data() -> CreateCaptcha {
levels,
duration: 30,
description: "dummy".into(),
publish_benchmarks: false,
}
}

View file

@ -45,5 +45,16 @@
<. } .>
<. } .>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
name="publish_benchmarks"
id="publish_benchmarks"
/>
</label>
<button class="sitekey-form__submit" type="submit">Submit</button>
</form>

View file

@ -38,6 +38,13 @@ export const addSubmitEventListener = (): void =>
const submit = async (e: Event) => {
e.preventDefault();
const PUBLISH_BENCHMARKS = <HTMLInputElement>(
FORM.querySelector("#publish_benchmarks")
);
const description = validateDescription(e);
const duration = validateDuration();
@ -50,6 +57,7 @@ const submit = async (e: Event) => {
levels: levels,
duration,
description,
publish_benchmarks: PUBLISH_BENCHMARKS.checked,
};
console.debug(`[form submition] json payload: ${JSON.stringify(payload)}`);

View file

@ -23,6 +23,7 @@
/>
</label>
<label class="sitekey-form__label" for="avg_traffic">
Average Traffic of your website
<input
@ -38,7 +39,6 @@
</label>
<label class="sitekey-form__label" for="avg_traffic">
Maximum traffic that your website can handle
<input
@ -68,5 +68,17 @@
/>
</label>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
name="publish_benchmarks"
id="publish_benchmarks"
/>
</label>
<button class="sitekey-form__submit" type="submit">Submit</button>
</form>

View file

@ -42,6 +42,7 @@ type TrafficPattern = {
peak_sustainable_traffic: number;
broke_my_site_traffic?: number;
description: string;
publish_benchmarks: boolean;
};
export const validate = (e: Event): TrafficPattern => {
@ -49,9 +50,7 @@ export const validate = (e: Event): TrafficPattern => {
let broke_is_set = false;
const AVG_TRAFFIC = <HTMLInputElement>(
FORM.querySelector("#avg_traffic")
);
const AVG_TRAFFIC = <HTMLInputElement>FORM.querySelector("#avg_traffic");
const PEAK_TRAFFIC = <HTMLInputElement>(
FORM.querySelector("#peak_sustainable_traffic")
);
@ -59,6 +58,10 @@ export const validate = (e: Event): TrafficPattern => {
FORM.querySelector("#broke_my_site_traffic")
);
const PUBLISH_BENCHMARKS = <HTMLInputElement>(
FORM.querySelector("#publish_benchmarks")
);
isBlankString(AVG_TRAFFIC.value, avg_traffic_name);
isBlankString(PEAK_TRAFFIC.value, peak_traffic_name);
@ -101,6 +104,7 @@ export const validate = (e: Event): TrafficPattern => {
peak_sustainable_traffic,
broke_my_site_traffic,
description,
publish_benchmarks: PUBLISH_BENCHMARKS.checked,
};
return payload;

View file

@ -16,6 +16,22 @@
<. } .>
<. let level = levels.len() + 1; .>
<. include!("../add/advance/add-level.html"); .>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
id="publish_benchmarks"
name="publish_benchmarks"
<. if publish_benchmarks { .>
checked
<. }.>
/>
</label>
<button data-sitekey="<.= key .>"
id="sitekey-form__submit" class="sitekey-form__submit" type="submit">
Submit

View file

@ -61,6 +61,21 @@
/>
</label>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
id="publish_benchmarks"
name="publish_benchmarks"
<. if pattern.publish_benchmarks { .>
checked
<. }.>
/>
</label>
<button data-sitekey="<.= key .>" class="sitekey-form__submit" type="submit">
Submit
</button>

View file

@ -47,11 +47,19 @@ const submit = async (e: Event) => {
const key = BTN.get().dataset.sitekey;
const PUBLISH_BENCHMARKS = <HTMLInputElement>(
Add.FORM.querySelector("#publish_benchmarks")
);
const payload = {
levels,
duration,
description,
key,
publish_benchmarks: PUBLISH_BENCHMARKS.checked,
};
console.debug(`[form submition] json payload: ${JSON.stringify(payload)}`);

View file

@ -19,7 +19,6 @@
<label class="sitekey-form__level-label" for="difficulty<.= num .>">
Difficulty
<input
readonly="readonly"
type="number"
id="difficulty<.= num .>"
class="sitekey-form__level-input"

View file

@ -23,6 +23,23 @@
<. for (count, level) in levels.iter().enumerate() { .>
<. include!("./existing-level.html"); .>
<. } .>
<label class="sitekey-form__label" for="publish_benchmarks">
Anonymously publish CAPTCHA performance statistics to help other webmasters
<input
class="sitekey-form__input"
type="checkbox"
id="publish_benchmarks"
readonly="readonly"
name="publish_benchmarks"
<. if publish_benchmarks { .>
checked
<. }.>
/>
</label>
<./* synchronise with "./__form-bottom.html" Lines below should break form */.>
</form>
<. include!("./stats.html"); .>

View file

@ -55,7 +55,7 @@ export const solveCaptchaRunner = async (e: Event): Promise<void> => {
worker.onmessage = async (event: MessageEvent) => {
const resp: ServiceWorkerWork = event.data;
console.log(
`Proof generated. Difficuly: ${config.difficulty_factor} Duration: ${resp.duration}`
`Proof generated. Difficuly: ${config.difficulty_factor} Duration: ${resp.work.time}`
);
const proof: Work = {
@ -63,6 +63,8 @@ export const solveCaptchaRunner = async (e: Event): Promise<void> => {
string: config.string,
nonce: resp.work.nonce,
result: resp.work.result,
time: Math.trunc(resp.work.time),
worker_type: resp.work.worker_type,
};
// 3. submit work

View file

@ -11,31 +11,66 @@
import { gen_pow } from "@mcaptcha/pow-wasm";
import * as p from "@mcaptcha/pow_sha256-polyfill";
import { WasmWork, PoWConfig } from "./types";
import { WasmWork, PoWConfig, SubmitWork } from "./types";
/**
* prove work
* @param {PoWConfig} config - the proof-of-work configuration using which
* work needs to be computed
* */
const prove = async (config: PoWConfig): Promise<WasmWork> => {
let proof: WasmWork = null;
const prove = async (config: PoWConfig): Promise<SubmitWork> => {
const WASM = "wasm";
const JS = "js";
if (WasmSupported) {
let proof: WasmWork = null;
let res: SubmitWork = null;
let time: number = null;
const t0 = performance.now();
const proofString = gen_pow(
config.salt,
config.string,
config.difficulty_factor
);
const t1 = performance.now();
time = t1 - t0;
proof = JSON.parse(proofString);
const worker_type = WASM;
res = {
result: proof.result,
nonce: proof.nonce,
worker_type,
time,
};
return res;
} else {
console.log("WASM unsupported, expect delay during proof generation");
let proof: WasmWork = null;
let time: number = null;
let res: SubmitWork = null;
const t0 = performance.now();
proof = await p.generate_work(
config.salt,
config.string,
config.difficulty_factor
);
const t1 = performance.now();
time = t1 - t0;
const worker_type = JS;
res = {
result: proof.result,
nonce: proof.nonce,
worker_type,
time,
};
return res;
}
return proof;
};
// credits: @jf-bastien on Stack Overflow

View file

@ -19,15 +19,9 @@ onmessage = async (e) => {
console.debug("message received at worker");
const config: PoWConfig = e.data;
const t0 = performance.now();
const work = await prove(config);
const t1 = performance.now();
const duration = t1 - t0;
const res: ServiceWorkerWork = {
work,
duration,
};
postMessage(res);

View file

@ -14,6 +14,15 @@ export type Work = {
nonce: number;
string: string;
key: string;
time: number;
worker_type: string;
};
/**
 * Result of a proof-of-work computation together with the benchmark
 * metadata collected while generating it; merged into the payload
 * submitted to the verification endpoint.
 */
export type SubmitWork = {
/** wall-clock proof-generation duration in ms (performance.now() delta in prove()) */
time: number;
/** engine that produced the proof: "wasm" or "js", as set by prove() */
worker_type: string;
result: string;
nonce: number;
};
export type WasmWork = {
@ -22,8 +31,7 @@ export type WasmWork = {
};
export type ServiceWorkerWork = {
work: WasmWork;
duration: number;
work: SubmitWork;
};
export type PoWConfig = {