diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index d42f3a6c..62cca4cd 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -52,7 +52,7 @@ jobs: maria: - image: mariadb + image: mariadb:10 env: MARIADB_USER: "maria" MARIADB_PASSWORD: "password" @@ -103,7 +103,7 @@ jobs: run: make frontend - name: Run the frontend tests - run: make frontend-test + run: make test.frontend - name: Run migrations run: make migrate diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 2e924696..328d56a4 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -54,7 +54,7 @@ jobs: - 10025:1025 maria: - image: mariadb + image: mariadb:10 env: MARIADB_USER: "maria" MARIADB_PASSWORD: "password" diff --git a/Cargo.lock b/Cargo.lock index 184645b2..8cab5bcf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -885,6 +885,7 @@ dependencies = [ "futures", "sqlx", "url", + "uuid", ] [[package]] @@ -897,6 +898,7 @@ dependencies = [ "futures", "sqlx", "url", + "uuid", ] [[package]] @@ -1580,8 +1582,8 @@ checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" [[package]] name = "libmcaptcha" -version = "0.2.2" -source = "git+https://github.com/mCaptcha/libmcaptcha?branch=master#22e3cb1a1c774ed352da17e4919e1ee177264e07" +version = "0.2.3" +source = "git+https://github.com/mCaptcha/libmcaptcha?tag=0.2.3#6bc0d11f8839d18c4d930cac0c0b86505ed1b2f6" dependencies = [ "actix", "crossbeam-channel", @@ -1722,6 +1724,7 @@ dependencies = [ "tokio", "url", "urlencoding", + "uuid", "validator", ] @@ -3392,6 +3395,16 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "uuid" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" +dependencies = [ + "getrandom", + "serde 1.0.143", +] + [[package]] name = "validator" version = "0.15.0" diff --git a/Cargo.toml b/Cargo.toml index 5af201d3..25058093 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,8 +59,8 @@ log = "0.4" lazy_static = "1.4" -#libmcaptcha = { version = "0.2.2", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"], tag ="0.2.2" } -libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] } +libmcaptcha = { version = "0.2.3", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"], tag ="0.2.3" } +#libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] } #libmcaptcha = { path = "../libmcaptcha", features = ["full"]} rand = "0.8" @@ -79,6 +79,7 @@ lettre = { version = "0.10.0-rc.3", features = [ ]} openssl = { version = "0.10.48", features = ["vendored"] } +uuid = { version = "1.4.0", features = ["v4", "serde"] } [dependencies.db-core] diff --git a/Makefile b/Makefile index a73d4fc7..e144d813 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,39 @@ BUNDLE = static/cache/bundle OPENAPI = docs/openapi CLEAN_UP = $(BUNDLE) src/cache_buster_data.json assets +define deploy_dependencies ## deploy dependencies + @-docker create --name ${db} \ + -e POSTGRES_PASSWORD=password \ + -p 5432:5432 \ + postgres + @-docker create \ + -p 3306:3306 \ + --name ${mdb} \ + --env MARIADB_USER=maria \ + --env MARIADB_PASSWORD=password \ + --env MARIADB_ROOT_PASSWORD=password \ + --env MARIADB_DATABASE=maria \ + mariadb:latest + 
@-docker create \ + -p 6379:6379 \ + --name mcaptcha-cache \ + mcaptcha/cache:latest + docker start ${db} + docker start ${mdb} + docker start mcaptcha-cache +endef + +define run_migrations ## run database migrations + cd db/db-migrations/ && cargo run +endef + +define run_dev_migrations ## run database migrations + cd db/db-sqlx-maria/ && \ + DATABASE_URL=${MARIA_DATABASE_URL} sqlx migrate run + cd db/db-sqlx-postgres/ && \ + DATABASE_URL=${POSTGRES_DATABASE_URL} sqlx migrate run +endef + define frontend_env ## install frontend deps yarn install cd docs/openapi && yarn install @@ -11,6 +44,30 @@ define cache_bust ## run cache_busting program cd utils/cache-bust && cargo run endef + +define test_frontend ## run frontend tests + cd $(OPENAPI)&& yarn test + yarn test +endef + +define test_db_sqlx_postgres + cd db/db-sqlx-postgres &&\ + DATABASE_URL=${POSTGRES_DATABASE_URL}\ + cargo test --no-fail-fast +endef + +define test_db_sqlx_maria + cd db/db-sqlx-maria &&\ + DATABASE_URL=${MARIA_DATABASE_URL}\ + cargo test --no-fail-fast +endef + +define test_core + cargo test --no-fail-fast +endef + + + default: frontend ## Build app in debug mode $(call cache_bust) cargo build @@ -35,10 +92,6 @@ clean: ## Delete build artifacts @yarn cache clean @-rm $(CLEAN_UP) -coverage: migrate ## Generate code coverage report in HTML format - $(call cache_bust) - cargo tarpaulin -t 1200 --out Html - doc: ## Generate documentation #yarn doc cargo doc --no-deps --workspace --all-features @@ -54,6 +107,19 @@ env: ## Setup development environtment cargo fetch $(call frontend_env) +env.db: ## Deploy dependencies + $(call deploy_dependencies) + sleep 5 + $(call run_migrations) + +env.db.recreate: ## Deploy dependencies from scratch + @-docker rm -f ${db} + @-docker rm -f ${mdb} + @-docker rm -f mcaptcha-cache + $(call deploy_dependencies) + sleep 5 + $(call run_migrations) + frontend-env: ## Install frontend deps $(call frontend_env) @@ -76,10 +142,6 @@ frontend: ## Build frontend @./scripts/librejs.sh @./scripts/cachebust.sh -frontend-test: ## Run frontend tests - cd $(OPENAPI)&& yarn test - yarn test - lint: ## Lint codebase cargo fmt -v --all -- --emit files cargo clippy --workspace --tests --all-features @@ -87,7 +149,10 @@ lint: ## Lint codebase cd $(OPENAPI)&& yarn test migrate: ## Run database migrations - cd db/db-migrations/ && cargo run + $(call run_migrations) + +migrate.dev: ## Run database migrations during development + $(call run_dev_migrations) release: frontend ## Build app with release optimizations $(call cache_bust) @@ -98,37 +163,49 @@ run: frontend ## Run app in debug mode cargo run -sqlx-offline-data: ## prepare sqlx offline data +db.sqlx.offline: ## prepare sqlx offline data cd db/db-sqlx-postgres && cargo sqlx prepare \ --database-url=${POSTGRES_DATABASE_URL} -- \ --all-features cd db/db-sqlx-maria && cargo sqlx prepare \ --database-url=${MARIA_DATABASE_URL} -- \ --all-features -# cd db/db-sqlx-sqlite/ \ -# && DATABASE_URL=${SQLITE_DATABASE_URL} cargo sqlx prepare -test-db: ## run tests on database - cd db/db-sqlx-postgres &&\ - DATABASE_URL=${POSTGRES_DATABASE_URL}\ - cargo test --no-fail-fast -test: frontend-test frontend ## Run all available tests +test: frontend ## Run all available tests + $(call test_frontend) $(call cache_bust) - cd db/db-sqlx-postgres &&\ - DATABASE_URL=${POSTGRES_DATABASE_URL}\ - cargo test --no-fail-fast - cd db/db-sqlx-maria &&\ - DATABASE_URL=${MARIA_DATABASE_URL}\ - cargo test --no-fail-fast - cargo test --no-fail-fast + $(call test_db_sqlx_postgres) + $(call 
test_db_sqlx_maria) + $(call test_core) # ./scripts/tests.sh +test.cov.html: migrate ## Generate code coverage report in HTML format + $(call cache_bust) + cargo tarpaulin -t 1200 --out Html + +test.cov.xml: migrate ## Generate code coverage report in XML format + $(call cache_bust) + cargo tarpaulin -t 1200 --out Xml + + +test.core: ## Run all core tests + $(call test_core) + +test.db: ## Run all database driver tests + $(call test_db_sqlx_postgres) + $(call test_db_sqlx_maria) + +test.db.pg: ## Run Postgres database driver tests + $(call test_db_sqlx_postgres) + +test.db.maria: ## Run Maria database driver tests + $(call test_db_sqlx_maria) + +test.frontend: ## Run frontend tests + $(call test_frontend) + test.integration: ## run integration tests with nightwatch.js ./scripts/integration.sh -xml-test-coverage: migrate ## Generate code coverage report in XML format - $(call cache_bust) - cargo tarpaulin -t 1200 --out Xml - help: ## Prints help for targets with comments - @cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + @cat $(MAKEFILE_LIST) | grep -E '^[a-zA-Z_-].+:.*?## .*$$' | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/build.rs b/build.rs index 904577cb..19fb1449 100644 --- a/build.rs +++ b/build.rs @@ -21,7 +21,7 @@ use sqlx::types::time::OffsetDateTime; fn main() { // note: add error checking yourself. let output = Command::new("git") - .args(&["rev-parse", "HEAD"]) + .args(["rev-parse", "HEAD"]) .output() .unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); diff --git a/db/db-core/Cargo.toml b/db/db-core/Cargo.toml index 87225139..9b62a7ea 100644 --- a/db/db-core/Cargo.toml +++ b/db/db-core/Cargo.toml @@ -13,8 +13,8 @@ async-trait = "0.1.51" thiserror = "1.0.30" serde = { version = "1", features = ["derive"]} url = { version = "2.2.2", features = ["serde"] } -#libmcaptcha = { version = "0.2.2", git = "https://github.com/mCaptcha/libmcaptcha", features = ["minimal"], default-features = false, tag = "0.2.2"} -libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] } +libmcaptcha = { version = "0.2.3", git = "https://github.com/mCaptcha/libmcaptcha", features = ["minimal"], default-features = false, tag = "0.2.3"} +#libmcaptcha = { branch = "master", git = "https://github.com/mCaptcha/libmcaptcha", features = ["full"] } [features] default = [] diff --git a/db/db-core/src/lib.rs b/db/db-core/src/lib.rs index 8c33d95f..ad23e77a 100644 --- a/db/db-core/src/lib.rs +++ b/db/db-core/src/lib.rs @@ -250,6 +250,81 @@ pub trait MCDatabase: std::marker::Send + std::marker::Sync + CloneSPDatabase { /// fetch PoWConfig confirms async fn fetch_confirm(&self, user: &str, key: &str) -> DBResult>; + + /// record PoW timing + async fn analysis_save( + &self, + captcha_id: &str, + d: &CreatePerformanceAnalytics, + ) -> DBResult<()>; + + /// fetch PoW analytics + async fn analytics_fetch( + &self, + captcha_id: &str, + limit: usize, + offset: usize, + ) -> DBResult>; + + /// Create psuedo ID against campaign ID to publish analytics + async fn analytics_create_psuedo_id_if_not_exists( + &self, + captcha_id: &str, + ) -> DBResult<()>; + + /// Get psuedo ID from campaign ID + async fn analytics_get_psuedo_id_from_capmaign_id( + &self, + captcha_id: &str, + ) -> DBResult; + + /// Get campaign ID from psuedo ID + async fn analytics_get_capmaign_id_from_psuedo_id( + &self, + psuedo_id: &str, + ) -> DBResult; + + 
/// Delete all records for campaign + async fn analytics_delete_all_records_for_campaign( + &self, + campaign_id: &str, + ) -> DBResult<()>; + + /// Get publishing status of pow analytics for captcha ID/ campaign ID + async fn analytics_captcha_is_published(&self, campaign_id: &str) -> DBResult { + match self + .analytics_get_psuedo_id_from_capmaign_id(campaign_id) + .await + { + Ok(_) => Ok(true), + Err(errors::DBError::CaptchaNotFound) => Ok(false), + Err(e) => Err(e), + } + } +} + +#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)] +/// Log Proof-of-Work CAPTCHA performance analytics +pub struct CreatePerformanceAnalytics { + /// time taken to generate proof + pub time: u32, + /// difficulty factor for which the proof was generated + pub difficulty_factor: u32, + /// worker/client type: wasm, javascript, python, etc. + pub worker_type: String, +} + +#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)] +/// Proof-of-Work CAPTCHA performance analytics +pub struct PerformanceAnalytics { + /// log ID + pub id: usize, + /// time taken to generate proof + pub time: u32, + /// difficulty factor for which the proof was generated + pub difficulty_factor: u32, + /// worker/client type: wasm, javascript, python, etc. + pub worker_type: String, } #[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq)] @@ -332,7 +407,6 @@ pub struct Secret { /// user's secret pub secret: String, } - /// Trait to clone MCDatabase pub trait CloneSPDatabase { /// clone DB diff --git a/db/db-core/src/tests.rs b/db/db-core/src/tests.rs index 2ca6786b..ceb87393 100644 --- a/db/db-core/src/tests.rs +++ b/db/db-core/src/tests.rs @@ -260,6 +260,60 @@ pub async fn database_works<'a, T: MCDatabase>( db.record_solve(c.key).await.unwrap(); db.record_confirm(c.key).await.unwrap(); + // analytics start + + db.analytics_create_psuedo_id_if_not_exists(c.key) + .await + .unwrap(); + let psuedo_id = db + .analytics_get_psuedo_id_from_capmaign_id(c.key) + .await + .unwrap(); + db.analytics_create_psuedo_id_if_not_exists(c.key) + .await + .unwrap(); + assert_eq!( + psuedo_id, + db.analytics_get_psuedo_id_from_capmaign_id(c.key) + .await + .unwrap() + ); + assert_eq!( + c.key, + db.analytics_get_capmaign_id_from_psuedo_id(&psuedo_id) + .await + .unwrap() + ); + + let analytics = CreatePerformanceAnalytics { + time: 0, + difficulty_factor: 0, + worker_type: "wasm".into(), + }; + db.analysis_save(c.key, &analytics).await.unwrap(); + let limit = 50; + let mut offset = 0; + let a = db.analytics_fetch(c.key, limit, offset).await.unwrap(); + assert_eq!(a[0].time, analytics.time); + assert_eq!(a[0].difficulty_factor, analytics.difficulty_factor); + assert_eq!(a[0].worker_type, analytics.worker_type); + offset += 1; + assert!(db + .analytics_fetch(c.key, limit, offset) + .await + .unwrap() + .is_empty()); + + db.analytics_delete_all_records_for_campaign(c.key) + .await + .unwrap(); + assert_eq!(db.analytics_fetch(c.key, 1000, 0).await.unwrap().len(), 0); + assert!(!db.analytics_captcha_is_published(c.key).await.unwrap()); + db.analytics_delete_all_records_for_campaign(c.key) + .await + .unwrap(); + // analytics end + assert_eq!(db.fetch_solve(p.username, c.key).await.unwrap().len(), 1); assert_eq!( db.fetch_config_fetched(p.username, c.key) diff --git a/db/db-sqlx-maria/Cargo.toml b/db/db-sqlx-maria/Cargo.toml index 7f419404..ab1aa8a7 100644 --- a/db/db-sqlx-maria/Cargo.toml +++ b/db/db-sqlx-maria/Cargo.toml @@ -13,6 +13,7 @@ async-trait = "0.1.51" db-core = {path = "../db-core"} futures = "0.3.15" 
sqlx = { version = "0.5.13", features = [ "runtime-actix-rustls", "mysql", "time", "offline" ] } +uuid = { version = "1.4.0", features = ["v4", "serde"] } [dev-dependencies] actix-rt = "2" diff --git a/db/db-sqlx-maria/migrations/20230627132800_mcaptcha_pow_analytics.sql b/db/db-sqlx-maria/migrations/20230627132800_mcaptcha_pow_analytics.sql new file mode 100644 index 00000000..78180c1b --- /dev/null +++ b/db/db-sqlx-maria/migrations/20230627132800_mcaptcha_pow_analytics.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS mcaptcha_pow_analytics ( + ID INT auto_increment, + PRIMARY KEY(ID), + config_id INTEGER NOT NULL, + time INTEGER NOT NULL, + difficulty_factor INTEGER NOT NULL, + worker_type VARCHAR(100) NOT NULL, + CONSTRAINT `fk_mcaptcha_config_id_pow_analytics` + FOREIGN KEY (config_id) + REFERENCES mcaptcha_config (config_id) + ON DELETE CASCADE + ON UPDATE CASCADE +); diff --git a/db/db-sqlx-maria/migrations/20230629185331_mcaptcha_psuedo_campaign_id.sql b/db/db-sqlx-maria/migrations/20230629185331_mcaptcha_psuedo_campaign_id.sql new file mode 100644 index 00000000..37c89045 --- /dev/null +++ b/db/db-sqlx-maria/migrations/20230629185331_mcaptcha_psuedo_campaign_id.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS mcaptcha_psuedo_campaign_id ( + ID INT auto_increment, + PRIMARY KEY(ID), + psuedo_id varchar(100) NOT NULL UNIQUE, + config_id INT NOT NULL, + + CONSTRAINT `fk_mcaptcha_psuedo_campaign_id_config_id` + FOREIGN KEY (config_id) + REFERENCES mcaptcha_config (config_id) + ON DELETE CASCADE + ON UPDATE CASCADE + +); diff --git a/db/db-sqlx-maria/sqlx-data.json b/db/db-sqlx-maria/sqlx-data.json index 35887e6f..54e49c77 100644 --- a/db/db-sqlx-maria/sqlx-data.json +++ b/db/db-sqlx-maria/sqlx-data.json @@ -25,6 +25,31 @@ }, "query": "SELECT time FROM mcaptcha_pow_confirmed_stats \n WHERE \n config_id = (\n SELECT config_id FROM mcaptcha_config \n WHERE \n captcha_key = ?\n AND\n user_id = (\n SELECT \n ID FROM mcaptcha_users WHERE name = ?))\n ORDER BY time DESC" }, + "14dc89b2988b221fd24e4f319b1d48f5e6c65c760c30d11c9c29087f09cee23a": { + "describe": { + "columns": [ + { + "name": "captcha_key", + "ordinal": 0, + "type_info": { + "char_set": 224, + "flags": { + "bits": 4101 + }, + "max_size": 400, + "type": "VarString" + } + } + ], + "nullable": [ + false + ], + "parameters": { + "Right": 1 + } + }, + "query": "SELECT\n captcha_key\n FROM\n mcaptcha_config\n WHERE\n config_id = (\n SELECT\n config_id\n FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n psuedo_id = ?\n );" + }, "22e697114c3ed5b0156cdceab11a398f1ef3a804f482e1cd948bc615ef95fc92": { "describe": { "columns": [], @@ -154,6 +179,31 @@ }, "query": "INSERT INTO mcaptcha_pow_fetched_stats \n (config_id, time) VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?)" }, + "5ad1ef722a961183228d851813b9f50284520bf8cc8118c765b72c108daaf6fb": { + "describe": { + "columns": [ + { + "name": "psuedo_id", + "ordinal": 0, + "type_info": { + "char_set": 224, + "flags": { + "bits": 4101 + }, + "max_size": 400, + "type": "VarString" + } + } + ], + "nullable": [ + false + ], + "parameters": { + "Right": 1 + } + }, + "query": "SELECT psuedo_id FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n config_id = (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?));\n " + }, "5d5a106981345e9f62bc2239c00cdc683d3aaaa820d63da300dc51e3f6f363d3": { "describe": { "columns": [], @@ -164,6 +214,16 @@ }, "query": "INSERT INTO mcaptcha_users \n (name , password, secret) VALUES (?, ?, ?)" }, + 
"6094468b7fa20043b0da90e366b7f1fa29a8c748e163b6712725440b25ae9361": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Right": 1 + } + }, + "query": "\n DELETE FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n ) " + }, "66ec7df10484f8e0206f3c97afc9136021589556c38dbbed341d6574487f79f2": { "describe": { "columns": [ @@ -406,6 +466,80 @@ }, "query": "UPDATE mcaptcha_users set email = ?\n WHERE name = ?" }, + "9e45969a0f79eab8caba41b0d91e5e3b85a1a68a49136f89fc90793c38f00041": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Right": 2 + } + }, + "query": "\n INSERT INTO\n mcaptcha_psuedo_campaign_id (config_id, psuedo_id)\n VALUES (\n (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)),\n ?\n );" + }, + "9f10afb0f242f11c58389803c5e85e244cc59102b8929a21e3fcaa852d57a52c": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": { + "char_set": 63, + "flags": { + "bits": 515 + }, + "max_size": 11, + "type": "Long" + } + }, + { + "name": "time", + "ordinal": 1, + "type_info": { + "char_set": 63, + "flags": { + "bits": 4097 + }, + "max_size": 11, + "type": "Long" + } + }, + { + "name": "difficulty_factor", + "ordinal": 2, + "type_info": { + "char_set": 63, + "flags": { + "bits": 4097 + }, + "max_size": 11, + "type": "Long" + } + }, + { + "name": "worker_type", + "ordinal": 3, + "type_info": { + "char_set": 224, + "flags": { + "bits": 4097 + }, + "max_size": 400, + "type": "VarString" + } + } + ], + "nullable": [ + false, + false, + false, + false + ], + "parameters": { + "Right": 3 + } + }, + "query": "SELECT\n id, time, difficulty_factor, worker_type\n FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n ) \n ORDER BY ID\n LIMIT ? OFFSET ?" + }, "a89c066db044cddfdebee6a0fd0d80a5a26dcb7ecc00a9899f5634b72ea0a952": { "describe": { "columns": [ @@ -759,6 +893,16 @@ }, "query": "INSERT INTO mcaptcha_pow_solved_stats \n (config_id, time) VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?)" }, + "e4d9bf156a368dcee1433dd5ced9f1991aa15f84e0ade916433aada40f68f0aa": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Right": 1 + } + }, + "query": "\n DELETE FROM\n mcaptcha_psuedo_campaign_id\n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config WHERE captcha_key = ?\n );" + }, "e6569a6064d0e07abea4c0bd4686cdfdaac64f0109ac40efaed06a744a2eaf5e": { "describe": { "columns": [ @@ -873,6 +1017,16 @@ }, "query": "SELECT name, password FROM mcaptcha_users WHERE email = ?" 
}, + "f987c4568ab28271d87af47f473b18cf41130a483333e81d5f50199758cbb98b": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Right": 4 + } + }, + "query": "INSERT INTO mcaptcha_pow_analytics \n (config_id, time, difficulty_factor, worker_type)\n VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?, ?, ?)" + }, "fc717ff0827ccfaa1cc61a71cc7f71c348ebb03d35895c54b011c03121ad2385": { "describe": { "columns": [], diff --git a/db/db-sqlx-maria/src/lib.rs b/db/db-sqlx-maria/src/lib.rs index ed6e8cf5..0652755f 100644 --- a/db/db-sqlx-maria/src/lib.rs +++ b/db/db-sqlx-maria/src/lib.rs @@ -22,6 +22,7 @@ use sqlx::mysql::MySqlPoolOptions; use sqlx::types::time::OffsetDateTime; use sqlx::ConnectOptions; use sqlx::MySqlPool; +use uuid::Uuid; pub mod errors; #[cfg(test)] @@ -895,6 +896,191 @@ impl MCDatabase for Database { Ok(Date::dates_to_unix(records)) } + + /// record PoW timing + async fn analysis_save( + &self, + captcha_id: &str, + d: &CreatePerformanceAnalytics, + ) -> DBResult<()> { + let _ = sqlx::query!( + "INSERT INTO mcaptcha_pow_analytics + (config_id, time, difficulty_factor, worker_type) + VALUES ((SELECT config_id FROM mcaptcha_config where captcha_key= ?), ?, ?, ?)", + captcha_id, + d.time as i32, + d.difficulty_factor as i32, + &d.worker_type, + ) + .execute(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + Ok(()) + } + + /// fetch PoW analytics + async fn analytics_fetch( + &self, + captcha_id: &str, + limit: usize, + offset: usize, + ) -> DBResult> { + struct P { + id: i32, + time: i32, + difficulty_factor: i32, + worker_type: String, + } + + impl From
<P>
for PerformanceAnalytics { + fn from(v: P) -> Self { + Self { + id: v.id as usize, + time: v.time as u32, + difficulty_factor: v.difficulty_factor as u32, + worker_type: v.worker_type, + } + } + } + + let mut c = sqlx::query_as!( + P, + "SELECT + id, time, difficulty_factor, worker_type + FROM + mcaptcha_pow_analytics + WHERE + config_id = ( + SELECT config_id FROM mcaptcha_config WHERE captcha_key = ? + ) + ORDER BY ID + LIMIT ? OFFSET ?", + &captcha_id, + limit as i64, + offset as i64, + ) + .fetch_all(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + let mut res = Vec::with_capacity(c.len()); + for i in c.drain(0..) { + res.push(i.into()) + } + + Ok(res) + } + + /// Create psuedo ID against campaign ID to publish analytics + async fn analytics_create_psuedo_id_if_not_exists( + &self, + captcha_id: &str, + ) -> DBResult<()> { + let id = Uuid::new_v4(); + sqlx::query!( + " + INSERT INTO + mcaptcha_psuedo_campaign_id (config_id, psuedo_id) + VALUES ( + (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)), + ? + );", + captcha_id, + &id.to_string(), + ) + .execute(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + + Ok(()) + } + + /// Get psuedo ID from campaign ID + async fn analytics_get_psuedo_id_from_capmaign_id( + &self, + captcha_id: &str, + ) -> DBResult { + struct ID { + psuedo_id: String, + } + + let res = sqlx::query_as!( + ID, + "SELECT psuedo_id FROM + mcaptcha_psuedo_campaign_id + WHERE + config_id = (SELECT config_id FROM mcaptcha_config WHERE captcha_key = (?)); + ", + captcha_id + ).fetch_one(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + + Ok(res.psuedo_id) + } + + /// Get campaign ID from psuedo ID + async fn analytics_get_capmaign_id_from_psuedo_id( + &self, + psuedo_id: &str, + ) -> DBResult { + struct ID { + captcha_key: String, + } + + let res = sqlx::query_as!( + ID, + "SELECT + captcha_key + FROM + mcaptcha_config + WHERE + config_id = ( + SELECT + config_id + FROM + mcaptcha_psuedo_campaign_id + WHERE + psuedo_id = ? + );", + psuedo_id + ) + .fetch_one(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + Ok(res.captcha_key) + } + + async fn analytics_delete_all_records_for_campaign( + &self, + campaign_id: &str, + ) -> DBResult<()> { + let _ = sqlx::query!( + " + DELETE FROM + mcaptcha_psuedo_campaign_id + WHERE config_id = ( + SELECT config_id FROM mcaptcha_config WHERE captcha_key = ? + );", + campaign_id + ) + .execute(&self.pool) + .await; + + let _ = sqlx::query!( + " + DELETE FROM + mcaptcha_pow_analytics + WHERE + config_id = ( + SELECT config_id FROM mcaptcha_config WHERE captcha_key = ? 
+ ) ", + campaign_id + ) + .execute(&self.pool) + .await; + + Ok(()) + } } #[derive(Clone)] diff --git a/db/db-sqlx-postgres/Cargo.toml b/db/db-sqlx-postgres/Cargo.toml index 027b7a0e..27461e7d 100644 --- a/db/db-sqlx-postgres/Cargo.toml +++ b/db/db-sqlx-postgres/Cargo.toml @@ -13,6 +13,7 @@ async-trait = "0.1.51" db-core = {path = "../db-core"} futures = "0.3.15" sqlx = { version = "0.5.13", features = [ "runtime-actix-rustls", "postgres", "time", "offline" ] } +uuid = { version = "1.4.0", features = ["v4", "serde"] } [dev-dependencies] actix-rt = "2" diff --git a/db/db-sqlx-postgres/migrations/20230627133023_mcaptcha_pow_analytics.sql b/db/db-sqlx-postgres/migrations/20230627133023_mcaptcha_pow_analytics.sql new file mode 100644 index 00000000..ad4eb5c6 --- /dev/null +++ b/db/db-sqlx-postgres/migrations/20230627133023_mcaptcha_pow_analytics.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS mcaptcha_pow_analytics ( + config_id INTEGER references mcaptcha_config(config_id) ON DELETE CASCADE, + time INTEGER NOT NULL, + difficulty_factor INTEGER NOT NULL, + worker_type VARCHAR(100) NOT NULL, + ID SERIAL PRIMARY KEY NOT NULL +); diff --git a/db/db-sqlx-postgres/migrations/20230629174617_mcaptcha_psuedo_campaign.sql b/db/db-sqlx-postgres/migrations/20230629174617_mcaptcha_psuedo_campaign.sql new file mode 100644 index 00000000..0d847dc8 --- /dev/null +++ b/db/db-sqlx-postgres/migrations/20230629174617_mcaptcha_psuedo_campaign.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS mcaptcha_psuedo_campaign_id ( + id SERIAL PRIMARY KEY NOT NULL, + config_id INTEGER NOT NULL references mcaptcha_config(config_id) ON DELETE CASCADE, + psuedo_id varchar(100) NOT NULL UNIQUE +); diff --git a/db/db-sqlx-postgres/sqlx-data.json b/db/db-sqlx-postgres/sqlx-data.json index 3b4cb791..2c3d85e3 100644 --- a/db/db-sqlx-postgres/sqlx-data.json +++ b/db/db-sqlx-postgres/sqlx-data.json @@ -1,5 +1,45 @@ { "db": "PostgreSQL", + "017576128f1c63aee062799a33f872457fe19f5d6429d0af312dc00c244b31cb": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "time", + "ordinal": 1, + "type_info": "Int4" + }, + { + "name": "difficulty_factor", + "ordinal": 2, + "type_info": "Int4" + }, + { + "name": "worker_type", + "ordinal": 3, + "type_info": "Varchar" + } + ], + "nullable": [ + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Text", + "Int8", + "Int8" + ] + } + }, + "query": "SELECT id, time, difficulty_factor, worker_type FROM mcaptcha_pow_analytics\n WHERE \n config_id = (\n SELECT \n config_id FROM mcaptcha_config \n WHERE \n key = $1\n )\n ORDER BY ID\n OFFSET $2 LIMIT $3\n " + }, "02deb524bb12632af9b7883975f75fdc30d6775d836aff647add1dffd1a4bc00": { "describe": { "columns": [ @@ -132,6 +172,26 @@ }, "query": "UPDATE mcaptcha_users set name = $1\n WHERE name = $2" }, + "21cdf28d8962389d22c8ddefdad82780f5316737e3d833623512aa12a54a026a": { + "describe": { + "columns": [ + { + "name": "key", + "ordinal": 0, + "type_info": "Varchar" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Text" + ] + } + }, + "query": "SELECT\n key\n FROM\n mcaptcha_config\n WHERE\n config_id = (\n SELECT\n config_id\n FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n psuedo_id = $1\n );" + }, "2b319a202bb983d5f28979d1e371f399125da1122fbda36a5a55b75b9c743451": { "describe": { "columns": [], @@ -180,6 +240,18 @@ }, "query": "SELECT email FROM mcaptcha_users WHERE name = $1" }, + "30d8945806b4c68b6da800395f61c1e480839093bfcda9c693bf1972a65c7d79": { + "describe": { + 
"columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text" + ] + } + }, + "query": "\n DELETE FROM\n mcaptcha_psuedo_campaign_id\n WHERE config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = ($1)\n );" + }, "3b1c8128fc48b16d8e8ea6957dd4fbc0eb19ae64748fd7824e9f5e1901dd1726": { "describe": { "columns": [], @@ -406,6 +478,26 @@ }, "query": "INSERT INTO mcaptcha_users \n (name , password, secret) VALUES ($1, $2, $3)" }, + "839dfdfc3543b12128cb2b44bf356cd81f3da380963e5684ec3624a0ea4f9547": { + "describe": { + "columns": [ + { + "name": "psuedo_id", + "ordinal": 0, + "type_info": "Varchar" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Text" + ] + } + }, + "query": "SELECT psuedo_id FROM\n mcaptcha_psuedo_campaign_id\n WHERE\n config_id = (SELECT config_id FROM mcaptcha_config WHERE key = ($1));\n " + }, "84484cb6892db29121816bc5bff5702b9e857e20aa14e79d080d78ae7593153b": { "describe": { "columns": [ @@ -493,6 +585,33 @@ }, "query": "SELECT EXISTS (SELECT 1 from mcaptcha_users WHERE name = $1)" }, + "af47990880a92c63d1cf5192203899c72621479dc6bb47859fb4498264b78033": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text", + "Int4", + "Int4", + "Varchar" + ] + } + }, + "query": "INSERT INTO mcaptcha_pow_analytics \n (config_id, time, difficulty_factor, worker_type)\n VALUES ((SELECT config_id FROM mcaptcha_config WHERE key = $1), $2, $3, $4)" + }, + "b67da576ff30a1bc8b1c0a79eff07f0622bd9ea035d3de15b91f5e1e8a5fda9b": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text" + ] + } + }, + "query": "\n DELETE FROM\n mcaptcha_pow_analytics\n WHERE\n config_id = (\n SELECT config_id FROM mcaptcha_config WHERE key = $1\n )\n " + }, "b97d810814fbeb2df19f47bcfa381bc6fb7ac6832d040b377cf4fca2ca896cfb": { "describe": { "columns": [], @@ -545,6 +664,19 @@ }, "query": "SELECT name, password FROM mcaptcha_users WHERE email = ($1)" }, + "c1bb8e02d1f9dc28322309d055de3c40ed4e1a1b9453a7e5a93a70e5186d762d": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text", + "Varchar" + ] + } + }, + "query": "\n INSERT INTO\n mcaptcha_psuedo_campaign_id (config_id, psuedo_id)\n VALUES (\n (SELECT config_id FROM mcaptcha_config WHERE key = ($1)),\n $2\n );" + }, "c2e167e56242de7e0a835e25004b15ca8340545fa0ca7ac8f3293157d2d03d98": { "describe": { "columns": [ diff --git a/db/db-sqlx-postgres/src/lib.rs b/db/db-sqlx-postgres/src/lib.rs index 752026ca..2ee8f85c 100644 --- a/db/db-sqlx-postgres/src/lib.rs +++ b/db/db-sqlx-postgres/src/lib.rs @@ -22,6 +22,7 @@ use sqlx::postgres::PgPoolOptions; use sqlx::types::time::OffsetDateTime; use sqlx::ConnectOptions; use sqlx::PgPool; +use uuid::Uuid; pub mod errors; #[cfg(test)] @@ -901,6 +902,194 @@ impl MCDatabase for Database { Ok(Date::dates_to_unix(records)) } + + /// record PoW timing + async fn analysis_save( + &self, + captcha_id: &str, + d: &CreatePerformanceAnalytics, + ) -> DBResult<()> { + let _ = sqlx::query!( + "INSERT INTO mcaptcha_pow_analytics + (config_id, time, difficulty_factor, worker_type) + VALUES ((SELECT config_id FROM mcaptcha_config WHERE key = $1), $2, $3, $4)", + captcha_id, + d.time as i32, + d.difficulty_factor as i32, + &d.worker_type, + ) + .execute(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + Ok(()) + } + + /// fetch PoW analytics + async fn analytics_fetch( + &self, + captcha_id: &str, + limit: usize, + offset: usize, + ) -> DBResult> { + struct P { + id: 
i32, + time: i32, + difficulty_factor: i32, + worker_type: String, + } + + impl From
<P>
for PerformanceAnalytics { + fn from(v: P) -> Self { + Self { + time: v.time as u32, + difficulty_factor: v.difficulty_factor as u32, + worker_type: v.worker_type, + id: v.id as usize, + } + } + } + + let mut c = sqlx::query_as!( + P, + "SELECT id, time, difficulty_factor, worker_type FROM mcaptcha_pow_analytics + WHERE + config_id = ( + SELECT + config_id FROM mcaptcha_config + WHERE + key = $1 + ) + ORDER BY ID + OFFSET $2 LIMIT $3 + ", + &captcha_id, + offset as i32, + limit as i32 + ) + .fetch_all(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + let mut res = Vec::with_capacity(c.len()); + for i in c.drain(0..) { + res.push(i.into()) + } + + Ok(res) + } + + /// Create psuedo ID against campaign ID to publish analytics + async fn analytics_create_psuedo_id_if_not_exists( + &self, + captcha_id: &str, + ) -> DBResult<()> { + let id = Uuid::new_v4(); + sqlx::query!( + " + INSERT INTO + mcaptcha_psuedo_campaign_id (config_id, psuedo_id) + VALUES ( + (SELECT config_id FROM mcaptcha_config WHERE key = ($1)), + $2 + );", + captcha_id, + &id.to_string(), + ) + .execute(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + + Ok(()) + } + + /// Get psuedo ID from campaign ID + async fn analytics_get_psuedo_id_from_capmaign_id( + &self, + captcha_id: &str, + ) -> DBResult { + struct ID { + psuedo_id: String, + } + + let res = sqlx::query_as!( + ID, + "SELECT psuedo_id FROM + mcaptcha_psuedo_campaign_id + WHERE + config_id = (SELECT config_id FROM mcaptcha_config WHERE key = ($1)); + ", + captcha_id + ) + .fetch_one(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + + Ok(res.psuedo_id) + } + + /// Get campaign ID from psuedo ID + async fn analytics_get_capmaign_id_from_psuedo_id( + &self, + psuedo_id: &str, + ) -> DBResult { + struct ID { + key: String, + } + + let res = sqlx::query_as!( + ID, + "SELECT + key + FROM + mcaptcha_config + WHERE + config_id = ( + SELECT + config_id + FROM + mcaptcha_psuedo_campaign_id + WHERE + psuedo_id = $1 + );", + psuedo_id + ) + .fetch_one(&self.pool) + .await + .map_err(|e| map_row_not_found_err(e, DBError::CaptchaNotFound))?; + Ok(res.key) + } + + async fn analytics_delete_all_records_for_campaign( + &self, + campaign_id: &str, + ) -> DBResult<()> { + let _ = sqlx::query!( + " + DELETE FROM + mcaptcha_psuedo_campaign_id + WHERE config_id = ( + SELECT config_id FROM mcaptcha_config WHERE key = ($1) + );", + campaign_id + ) + .execute(&self.pool) + .await; + + let _ = sqlx::query!( + " + DELETE FROM + mcaptcha_pow_analytics + WHERE + config_id = ( + SELECT config_id FROM mcaptcha_config WHERE key = $1 + ) + ", + campaign_id + ) + .execute(&self.pool) + .await; + + Ok(()) + } } #[derive(Clone)] diff --git a/src/api/v1/mcaptcha/create.rs b/src/api/v1/mcaptcha/create.rs index 2e882500..743fb831 100644 --- a/src/api/v1/mcaptcha/create.rs +++ b/src/api/v1/mcaptcha/create.rs @@ -31,6 +31,7 @@ pub struct CreateCaptcha { pub levels: Vec, pub duration: u32, pub description: String, + pub publish_benchmarks: bool, } #[derive(Clone, Debug, Deserialize, Serialize)] @@ -52,6 +53,11 @@ pub async fn create( ) -> ServiceResult { let username = id.identity().unwrap(); let mcaptcha_config = runner::create(&payload, &data, &username).await?; + if payload.publish_benchmarks { + data.db + .analytics_create_psuedo_id_if_not_exists(&mcaptcha_config.key) + .await?; + } Ok(HttpResponse::Ok().json(mcaptcha_config)) } diff --git a/src/api/v1/mcaptcha/easy.rs 
b/src/api/v1/mcaptcha/easy.rs index 56584dde..9707dfc0 100644 --- a/src/api/v1/mcaptcha/easy.rs +++ b/src/api/v1/mcaptcha/easy.rs @@ -60,6 +60,9 @@ pub struct TrafficPatternRequest { pub broke_my_site_traffic: Option, /// Captcha description pub description: String, + + /// publish benchmarks + pub publish_benchmarks: bool, } impl From<&TrafficPatternRequest> for TrafficPattern { @@ -127,12 +130,14 @@ async fn create( levels, duration: data.settings.captcha.default_difficulty_strategy.duration, description: payload.description, + publish_benchmarks: payload.publish_benchmarks, }; let mcaptcha_config = create_runner(&msg, &data, &username).await?; data.db .add_traffic_pattern(&username, &mcaptcha_config.key, &pattern) .await?; + Ok(HttpResponse::Ok().json(mcaptcha_config)) } @@ -162,6 +167,7 @@ async fn update( duration: data.settings.captcha.default_difficulty_strategy.duration, description: payload.pattern.description, key: payload.key, + publish_benchmarks: payload.pattern.publish_benchmarks, }; update_captcha_runner(&msg, &data, &username).await?; @@ -292,6 +298,7 @@ pub mod tests { peak_sustainable_traffic: 1_000_000, broke_my_site_traffic: Some(10_000_000), description: NAME.into(), + publish_benchmarks: false, }; let default_levels = calculate( @@ -323,6 +330,11 @@ pub mod tests { assert_eq!(get_level_resp.status(), StatusCode::OK); let res_levels: Vec = test::read_body_json(get_level_resp).await; assert_eq!(res_levels, default_levels); + assert!(!data + .db + .analytics_captcha_is_published(&token_key.key) + .await + .unwrap()); // END create_easy // START update_easy @@ -331,6 +343,7 @@ pub mod tests { peak_sustainable_traffic: 10_000, broke_my_site_traffic: Some(1_000_000), description: NAME.into(), + publish_benchmarks: true, }; let updated_default_values = calculate( @@ -352,6 +365,11 @@ pub mod tests { ) .await; assert_eq!(update_token_resp.status(), StatusCode::OK); + assert!(data + .db + .analytics_captcha_is_published(&token_key.key) + .await + .unwrap()); let get_level_resp = test::call_service( &app, @@ -394,5 +412,52 @@ pub mod tests { )); assert!(body.contains(&payload.pattern.avg_traffic.to_string())); assert!(body.contains(&payload.pattern.peak_sustainable_traffic.to_string())); + + // START update_easy to delete published results + let mut payload2 = TrafficPatternRequest { + avg_traffic: 100_000, + peak_sustainable_traffic: 1_000_000, + broke_my_site_traffic: Some(10_000_000), + description: NAME.into(), + publish_benchmarks: true, + }; + + let add_token_resp = test::call_service( + &app, + post_request!(&payload2, ROUTES.captcha.easy.create) + .cookie(cookies.clone()) + .to_request(), + ) + .await; + assert_eq!(add_token_resp.status(), StatusCode::OK); + + assert!(data + .db + .analytics_captcha_is_published(&token_key.key) + .await + .unwrap()); + + let token_key2: MCaptchaDetails = test::read_body_json(add_token_resp).await; + + payload2.publish_benchmarks = false; + + let payload = UpdateTrafficPattern { + pattern: payload2, + key: token_key2.key.clone(), + }; + + let update_token_resp = test::call_service( + &app, + post_request!(&payload, ROUTES.captcha.easy.update) + .cookie(cookies.clone()) + .to_request(), + ) + .await; + assert_eq!(update_token_resp.status(), StatusCode::OK); + assert!(!data + .db + .analytics_captcha_is_published(&token_key2.key) + .await + .unwrap()); } } diff --git a/src/api/v1/mcaptcha/test.rs b/src/api/v1/mcaptcha/test.rs index 9019a042..2b1d43ff 100644 --- a/src/api/v1/mcaptcha/test.rs +++ b/src/api/v1/mcaptcha/test.rs @@ -82,6 +82,7 
@@ pub async fn level_routes_work(data: ArcData) { levels: levels.clone(), description: add_level.description, duration: add_level.duration, + publish_benchmarks: true, }; let add_token_resp = test::call_service( diff --git a/src/api/v1/mcaptcha/update.rs b/src/api/v1/mcaptcha/update.rs index 198cc8de..0416338d 100644 --- a/src/api/v1/mcaptcha/update.rs +++ b/src/api/v1/mcaptcha/update.rs @@ -76,6 +76,7 @@ pub struct UpdateCaptcha { pub duration: u32, pub description: String, pub key: String, + pub publish_benchmarks: bool, } #[my_codegen::post( @@ -139,6 +140,16 @@ pub mod runner { e ); } + + if payload.publish_benchmarks { + data.db + .analytics_create_psuedo_id_if_not_exists(&payload.key) + .await?; + } else { + data.db + .analytics_delete_all_records_for_campaign(&payload.key) + .await?; + } Ok(()) } } diff --git a/src/api/v1/pow/get_config.rs b/src/api/v1/pow/get_config.rs index 13c381d5..131c56ec 100644 --- a/src/api/v1/pow/get_config.rs +++ b/src/api/v1/pow/get_config.rs @@ -109,8 +109,8 @@ pub async fn init_mcaptcha(data: &AppData, key: &str) -> ServiceResult<()> { for level in levels.iter() { let level = LevelBuilder::default() - .visitor_threshold(level.visitor_threshold as u32) - .difficulty_factor(level.difficulty_factor as u32) + .visitor_threshold(level.visitor_threshold) + .difficulty_factor(level.difficulty_factor) .unwrap() .build() .unwrap(); @@ -250,6 +250,7 @@ pub mod tests { levels: levels.into(), duration: 30, description: "dummy".into(), + publish_benchmarks: true, }; // 1. add level @@ -267,11 +268,11 @@ pub mod tests { key: token_key.key.clone(), }; - let url = V1_API_ROUTES.pow.get_config; + let _url = V1_API_ROUTES.pow.get_config; let mut prev = 0; for (count, l) in levels.iter().enumerate() { - for l in prev..l.visitor_threshold * 2 { - let get_config_resp = test::call_service( + for _l in prev..l.visitor_threshold * 2 { + let _get_config_resp = test::call_service( &app, post_request!(&get_config_payload, V1_API_ROUTES.pow.get_config) .to_request(), diff --git a/src/api/v1/pow/verify_pow.rs b/src/api/v1/pow/verify_pow.rs index 94b9b038..f7efd541 100644 --- a/src/api/v1/pow/verify_pow.rs +++ b/src/api/v1/pow/verify_pow.rs @@ -32,6 +32,27 @@ pub struct ValidationToken { pub token: String, } +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct ApiWork { + pub string: String, + pub result: String, + pub nonce: u64, + pub key: String, + pub time: Option, + pub worker_type: Option, +} + +impl From for Work { + fn from(value: ApiWork) -> Self { + Self { + string: value.string, + nonce: value.nonce, + result: value.result, + key: value.key, + } + } +} + // API keys are mcaptcha actor names /// route handler that verifies PoW and issues a solution token @@ -39,7 +60,7 @@ pub struct ValidationToken { #[my_codegen::post(path = "V1_API_ROUTES.pow.verify_pow()")] pub async fn verify_pow( req: HttpRequest, - payload: web::Json, + payload: web::Json, data: AppData, ) -> ServiceResult { #[cfg(not(test))] @@ -52,8 +73,19 @@ pub async fn verify_pow( let ip = "127.0.1.1".into(); let key = payload.key.clone(); - let res = data.captcha.verify_pow(payload.into_inner(), ip).await?; + let payload = payload.into_inner(); + let worker_type = payload.worker_type.clone(); + let time = payload.time; + let (res, difficulty_factor) = data.captcha.verify_pow(payload.into(), ip).await?; data.stats.record_solve(&data, &key).await?; + if time.is_some() && worker_type.is_some() { + let analytics = db_core::CreatePerformanceAnalytics { + difficulty_factor, + time: time.unwrap(), + 
worker_type: worker_type.unwrap(), + }; + data.db.analysis_save(&key, &analytics).await?; + } let payload = ValidationToken { token: res }; Ok(HttpResponse::Ok().json(payload)) } @@ -81,6 +113,81 @@ pub mod tests { verify_pow_works(data).await; } + #[actix_rt::test] + async fn verify_analytics_pow_works_pg() { + let data = crate::tests::pg::get_data().await; + verify_analytics_pow_works(data).await; + } + + #[actix_rt::test] + async fn verify_analytics_pow_works_maria() { + let data = crate::tests::maria::get_data().await; + verify_analytics_pow_works(data).await; + } + + pub async fn verify_analytics_pow_works(data: ArcData) { + const NAME: &str = "powanalyticsuser"; + const PASSWORD: &str = "testingpas"; + const EMAIL: &str = "powanalyticsuser@a.com"; + let data = &data; + + delete_user(data, NAME).await; + + register_and_signin(data, NAME, EMAIL, PASSWORD).await; + let (_, _signin_resp, token_key) = add_levels_util(data, NAME, PASSWORD).await; + let app = get_app!(data).await; + + let get_config_payload = GetConfigPayload { + key: token_key.key.clone(), + }; + + // update and check changes + + let get_config_resp = test::call_service( + &app, + post_request!(&get_config_payload, V1_API_ROUTES.pow.get_config) + .to_request(), + ) + .await; + assert_eq!(get_config_resp.status(), StatusCode::OK); + let config: PoWConfig = test::read_body_json(get_config_resp).await; + + let pow = pow_sha256::ConfigBuilder::default() + .salt(config.salt) + .build() + .unwrap(); + let work = pow + .prove_work(&config.string.clone(), config.difficulty_factor) + .unwrap(); + + let work = ApiWork { + string: config.string.clone(), + result: work.result, + nonce: work.nonce, + key: token_key.key.clone(), + time: Some(100), + worker_type: Some("wasm".into()), + }; + + let pow_verify_resp = test::call_service( + &app, + post_request!(&work, V1_API_ROUTES.pow.verify_pow).to_request(), + ) + .await; + assert_eq!(pow_verify_resp.status(), StatusCode::OK); + let limit = 50; + let offset = 0; + let mut analytics = data + .db + .analytics_fetch(&token_key.key, limit, offset) + .await + .unwrap(); + assert_eq!(analytics.len(), 1); + let a = analytics.pop().unwrap(); + assert_eq!(a.time, work.time.unwrap()); + assert_eq!(a.worker_type, work.worker_type.unwrap()); + } + pub async fn verify_pow_works(data: ArcData) { const NAME: &str = "powverifyusr"; const PASSWORD: &str = "testingpas"; @@ -129,6 +236,12 @@ pub mod tests { ) .await; assert_eq!(pow_verify_resp.status(), StatusCode::OK); + assert!(data + .db + .analytics_fetch(&token_key.key, 50, 0) + .await + .unwrap() + .is_empty()); let string_not_found = test::call_service( &app, diff --git a/src/data.rs b/src/data.rs index 4aaf5031..6a9b54e8 100644 --- a/src/data.rs +++ b/src/data.rs @@ -83,7 +83,11 @@ impl SystemGroup { enum_system_wrapper!(get_pow, String, CaptchaResult>); // utility function to verify [Work] - pub async fn verify_pow(&self, msg: Work, ip: String) -> CaptchaResult { + pub async fn verify_pow( + &self, + msg: Work, + ip: String, + ) -> CaptchaResult<(String, u32)> { match self { Self::Embedded(val) => val.verify_pow(msg, ip).await, Self::Redis(val) => val.verify_pow(msg, ip).await, @@ -203,9 +207,9 @@ impl Data { }; let stats: Box = if s.captcha.enable_stats { - Box::new(Real::default()) + Box::::default() } else { - Box::new(Dummy::default()) + Box::::default() }; let data = Data { diff --git a/src/demo.rs b/src/demo.rs index 46fafcca..5b4e641a 100644 --- a/src/demo.rs +++ b/src/demo.rs @@ -132,7 +132,7 @@ mod tests { let duration = 
Duration::from_secs(DURATION); // register works - let _ = DemoUser::register_demo_user(&data).await.unwrap(); + DemoUser::register_demo_user(&data).await.unwrap(); let payload = AccountCheckPayload { val: DEMO_USER.into(), }; diff --git a/src/pages/panel/sitekey/edit.rs b/src/pages/panel/sitekey/edit.rs index 0f0e54cd..43cf46f6 100644 --- a/src/pages/panel/sitekey/edit.rs +++ b/src/pages/panel/sitekey/edit.rs @@ -35,15 +35,22 @@ struct AdvanceEditPage { name: String, key: String, levels: Vec, + publish_benchmarks: bool, } impl AdvanceEditPage { - fn new(config: Captcha, levels: Vec, key: String) -> Self { + fn new( + config: Captcha, + levels: Vec, + key: String, + publish_benchmarks: bool, + ) -> Self { AdvanceEditPage { duration: config.duration as u32, name: config.description, levels, key, + publish_benchmarks, } } } @@ -63,8 +70,9 @@ pub async fn advance( let config = data.db.get_captcha_config(&username, &key).await?; let levels = data.db.get_captcha_levels(Some(&username), &key).await?; + let publish_benchmarks = data.db.analytics_captcha_is_published(&key).await?; - let body = AdvanceEditPage::new(config, levels, key) + let body = AdvanceEditPage::new(config, levels, key, publish_benchmarks) .render_once() .unwrap(); Ok(HttpResponse::Ok() @@ -106,11 +114,14 @@ pub async fn easy( match data.db.get_traffic_pattern(&username, &key).await { Ok(c) => { let config = data.db.get_captcha_config(&username, &key).await?; + let publish_benchmarks = + data.db.analytics_captcha_is_published(&key).await?; let pattern = TrafficPatternRequest { - peak_sustainable_traffic: c.peak_sustainable_traffic as u32, - avg_traffic: c.avg_traffic as u32, - broke_my_site_traffic: c.broke_my_site_traffic.map(|n| n as u32), + peak_sustainable_traffic: c.peak_sustainable_traffic, + avg_traffic: c.avg_traffic, + broke_my_site_traffic: c.broke_my_site_traffic.map(|n| n), description: config.description, + publish_benchmarks, }; let page = EasyEditPage::new(key, pattern).render_once().unwrap(); diff --git a/src/pages/panel/sitekey/view.rs b/src/pages/panel/sitekey/view.rs index 4b1865a5..d83faf88 100644 --- a/src/pages/panel/sitekey/view.rs +++ b/src/pages/panel/sitekey/view.rs @@ -36,6 +36,7 @@ struct IndexPage { key: String, levels: Vec, stats: CaptchaStats, + publish_benchmarks: bool, } impl IndexPage { @@ -44,6 +45,7 @@ impl IndexPage { config: Captcha, levels: Vec, key: String, + publish_benchmarks: bool, ) -> Self { IndexPage { duration: config.duration as u32, @@ -51,6 +53,7 @@ impl IndexPage { levels, key, stats, + publish_benchmarks, } } } @@ -70,8 +73,9 @@ pub async fn view_sitekey( let config = data.db.get_captcha_config(&username, &key).await?; let levels = data.db.get_captcha_levels(Some(&username), &key).await?; let stats = data.stats.fetch(&data, &username, &key).await?; + let publish_benchmarks = data.db.analytics_captcha_is_published(&key).await?; - let body = IndexPage::new(stats, config, levels, key) + let body = IndexPage::new(stats, config, levels, key, publish_benchmarks) .render_once() .unwrap(); Ok(HttpResponse::Ok() diff --git a/src/settings.rs b/src/settings.rs index 09fab199..133cc5e3 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -19,7 +19,7 @@ use std::{env, fs}; use config::{Config, ConfigError, Environment, File}; use derive_more::Display; -use log::{debug, warn}; + use serde::{Deserialize, Serialize}; use url::Url; @@ -191,7 +191,7 @@ impl Settings { .unwrap(); log::info!("Overriding [database].url and [database].database_type with environment variable"); } - Err(e) => { + 
Err(_e) => { set_database_url(&mut s); } } diff --git a/src/tests/mod.rs b/src/tests/mod.rs index 9de70cf9..f771417f 100644 --- a/src/tests/mod.rs +++ b/src/tests/mod.rs @@ -52,8 +52,8 @@ pub mod pg { settings.captcha.runners = Some(1); settings.database.url = url.clone(); settings.database.database_type = DBType::Postgres; - let data = Data::new(&settings).await; - data + + Data::new(&settings).await } } pub mod maria { @@ -71,8 +71,8 @@ pub mod maria { settings.captcha.runners = Some(1); settings.database.url = url.clone(); settings.database.database_type = DBType::Maria; - let data = Data::new(&settings).await; - data + + Data::new(&settings).await } } //pub async fn get_data() -> ArcData { @@ -118,7 +118,7 @@ macro_rules! get_app { .wrap(actix_middleware::NormalizePath::new( actix_middleware::TrailingSlash::Trim, )) - .configure(crate::routes::services), + .configure($crate::routes::services), ) }; ($data:expr) => { @@ -262,5 +262,6 @@ pub fn get_level_data() -> CreateCaptcha { levels, duration: 30, description: "dummy".into(), + publish_benchmarks: false, } } diff --git a/templates/panel/sitekey/add/advance/form.html b/templates/panel/sitekey/add/advance/form.html index 69f18085..060867ff 100644 --- a/templates/panel/sitekey/add/advance/form.html +++ b/templates/panel/sitekey/add/advance/form.html @@ -45,5 +45,16 @@ <. } .> <. } .> + + + diff --git a/templates/panel/sitekey/add/advance/ts/form/index.ts b/templates/panel/sitekey/add/advance/ts/form/index.ts index 2140e421..0bb34c49 100644 --- a/templates/panel/sitekey/add/advance/ts/form/index.ts +++ b/templates/panel/sitekey/add/advance/ts/form/index.ts @@ -38,6 +38,13 @@ export const addSubmitEventListener = (): void => const submit = async (e: Event) => { e.preventDefault(); + + const PUBLISH_BENCHMARKS = ( + FORM.querySelector("#publish_benchmarks") + ); + + + const description = validateDescription(e); const duration = validateDuration(); @@ -50,6 +57,7 @@ const submit = async (e: Event) => { levels: levels, duration, description, + publish_benchmarks: PUBLISH_BENCHMARKS.checked, }; console.debug(`[form submition] json payload: ${JSON.stringify(payload)}`); diff --git a/templates/panel/sitekey/add/novice/form.html b/templates/panel/sitekey/add/novice/form.html index 141fb8bb..c86a73f8 100644 --- a/templates/panel/sitekey/add/novice/form.html +++ b/templates/panel/sitekey/add/novice/form.html @@ -23,6 +23,7 @@ /> +