feat: add syncing-server package
parent 923fe2a19b
commit 6cc4ef90db
172 changed files with 10489 additions and 25 deletions
170 .github/workflows/syncing-server.release.dev.yml vendored Normal file
@@ -0,0 +1,170 @@
name: Syncing Server Dev

concurrency:
  group: syncing_server_dev_environment
  cancel-in-progress: true

on:
  push:
    tags:
      - '@standardnotes/syncing-server@[0-9]*.[0-9]*.[0-9]*-alpha.[0-9]*'
      - '@standardnotes/syncing-server@[0-9]*.[0-9]*.[0-9]*-beta.[0-9]*'
  workflow_dispatch:

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v1
        with:
          node-version: '16.x'
      - run: yarn lint:syncing-server
      - run: yarn test:syncing-server

  publish-aws-ecr:
    needs: test

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - run: cp .env.sample .env
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
      - name: Build, tag, and push image to Amazon ECR
        id: build-image
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          ECR_REPOSITORY: syncing-server-js
          IMAGE_TAG: ${{ github.sha }}
        run: |
          yarn docker build @standardnotes/syncing-server -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          docker tag $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:dev
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:dev

  publish-docker-hub:
    needs: test

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - run: cp .env.sample .env
      - name: Publish to Registry
        uses: elgohr/Publish-Docker-Github-Action@master
        with:
          name: standardnotes/syncing-server-js
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          tags: "dev,${{ github.sha }}"

  deploy-web:
    needs: publish-aws-ecr

    runs-on: ubuntu-latest

    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition syncing-server-js-dev --query taskDefinition > task-definition.json
      - name: Fill in the new version in the Amazon ECS task definition
        run: |
          jq '(.containerDefinitions[] | select(.name=="syncing-server-js-dev") | .environment[] | select(.name=="VERSION")).value = "${{ github.sha }}"' task-definition.json > tmp.json && mv tmp.json task-definition.json
      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: syncing-server-js-dev
          image: ${{ secrets.AWS_ECR_REGISTRY }}/syncing-server-js:${{ github.sha }}
      - name: Deploy Amazon ECS task definition
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: syncing-server-js-dev
          cluster: dev
          wait-for-service-stability: true

  deploy-worker:
    needs: publish-aws-ecr

    runs-on: ubuntu-latest

    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition syncing-server-js-worker-dev --query taskDefinition > task-definition.json
      - name: Fill in the new version in the Amazon ECS task definition
        run: |
          jq '(.containerDefinitions[] | select(.name=="syncing-server-js-worker-dev") | .environment[] | select(.name=="VERSION")).value = "${{ github.sha }}"' task-definition.json > tmp.json && mv tmp.json task-definition.json
      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: syncing-server-js-worker-dev
          image: ${{ secrets.AWS_ECR_REGISTRY }}/syncing-server-js:${{ github.sha }}
      - name: Deploy Amazon ECS task definition
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: syncing-server-js-worker-dev
          cluster: dev
          wait-for-service-stability: true

  newrelic:
    needs: [ deploy-web, deploy-worker ]

    runs-on: ubuntu-latest
    steps:
      - name: Create New Relic deployment marker for Web
        uses: newrelic/deployment-marker-action@v1
        with:
          accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
          apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
          applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SYNCING_SERVER_WEB_DEV }}
          revision: "${{ github.sha }}"
          description: "Automated Deployment via Github Actions"
          user: "${{ github.actor }}"
      - name: Create New Relic deployment marker for Worker
        uses: newrelic/deployment-marker-action@v1
        with:
          accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
          apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
          applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SYNCING_SERVER_WORKER_DEV }}
          revision: "${{ github.sha }}"
          description: "Automated Deployment via Github Actions"
          user: "${{ github.actor }}"

  notify_discord:
    needs: [ deploy-web, deploy-worker ]

    runs-on: ubuntu-latest

    steps:
      - name: Run Discord Webhook
        uses: johnnyhuy/actions-discord-git-webhook@main
        with:
          webhook_url: ${{ secrets.DISCORD_WEBHOOK_URL }}
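Note on the trigger: the two tag filters above use GitHub Actions glob syntax, where [0-9] is a character class and * matches any run of characters other than /, so lerna-style prerelease tags such as @standardnotes/syncing-server@1.0.1-alpha.0 (hypothetical version) start a dev deployment. A rough TypeScript RegExp translation for sanity-checking tag names locally (illustrative only, not part of this commit; Actions' own matcher differs in edge cases):

// Approximate translation of the alpha tag filter above (hypothetical helper).
const alphaTag = /^@standardnotes\/syncing-server@[0-9].*\.[0-9].*\.[0-9].*-alpha\.[0-9].*$/

console.log(alphaTag.test('@standardnotes/syncing-server@1.0.1-alpha.0')) // true
console.log(alphaTag.test('@standardnotes/syncing-server@1.0.1')) // false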
225 .pnp.cjs generated
@@ -27,6 +27,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
    {\
      "name": "@standardnotes/scheduler-server",\
      "reference": "workspace:packages/scheduler"\
    },\
    {\
      "name": "@standardnotes/syncing-server",\
      "reference": "workspace:packages/syncing-server"\
    }\
  ],\
  "enableTopLevelFallback": true,\

@@ -34,7 +38,8 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
   "fallbackExclusionList": [\
     ["@standardnotes/auth-server", ["workspace:packages/auth"]],\
     ["@standardnotes/scheduler-server", ["workspace:packages/scheduler"]],\
-    ["@standardnotes/server-monorepo", ["workspace:."]]\
+    ["@standardnotes/server-monorepo", ["workspace:."]],\
+    ["@standardnotes/syncing-server", ["workspace:packages/syncing-server"]]\
   ],\
   "fallbackPool": [\
   ],\

@@ -2646,6 +2651,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["@standardnotes/payloads", [\
      ["npm:1.5.1", {\
        "packageLocation": "./.yarn/cache/@standardnotes-payloads-npm-1.5.1-45dffe2f5c-65c28421e7.zip/node_modules/@standardnotes/payloads/",\
        "packageDependencies": [\
          ["@standardnotes/payloads", "npm:1.5.1"],\
          ["@standardnotes/common", "npm:1.23.0"],\
          ["@standardnotes/features", "npm:1.45.5"],\
          ["@standardnotes/utils", "npm:1.6.11"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["@standardnotes/responses", [\
      ["npm:1.6.36", {\
        "packageLocation": "./.yarn/cache/@standardnotes-responses-npm-1.6.36-d245f42de1-bb78a2cefa.zip/node_modules/@standardnotes/responses/",\
@@ -2765,7 +2782,73 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["@standardnotes/syncing-server", [\
      ["workspace:packages/syncing-server", {\
        "packageLocation": "./packages/syncing-server/",\
        "packageDependencies": [\
          ["@standardnotes/syncing-server", "workspace:packages/syncing-server"],\
          ["@newrelic/native-metrics", "npm:7.0.2"],\
          ["@newrelic/winston-enricher", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:2.1.2"],\
          ["@sentry/node", "npm:6.19.7"],\
          ["@standardnotes/analytics", "npm:1.6.0"],\
          ["@standardnotes/auth", "npm:3.19.3"],\
          ["@standardnotes/common", "npm:1.23.0"],\
          ["@standardnotes/domain-events", "npm:2.32.2"],\
          ["@standardnotes/domain-events-infra", "npm:1.5.2"],\
          ["@standardnotes/payloads", "npm:1.5.1"],\
          ["@standardnotes/responses", "npm:1.6.36"],\
          ["@standardnotes/settings", "npm:1.14.3"],\
          ["@standardnotes/time", "npm:1.6.9"],\
          ["@types/cors", "npm:2.8.12"],\
          ["@types/dotenv", "npm:8.2.0"],\
          ["@types/express", "npm:4.17.13"],\
          ["@types/inversify-express-utils", "npm:2.0.0"],\
          ["@types/ioredis", "npm:4.28.10"],\
          ["@types/jest", "npm:28.1.3"],\
          ["@types/jsonwebtoken", "npm:8.5.8"],\
          ["@types/newrelic", "npm:7.0.3"],\
          ["@types/prettyjson", "npm:0.0.29"],\
          ["@types/ua-parser-js", "npm:0.7.36"],\
          ["@types/uuid", "npm:8.3.4"],\
          ["@typescript-eslint/eslint-plugin", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:5.29.0"],\
          ["aws-sdk", "npm:2.1159.0"],\
          ["axios", "npm:0.24.0"],\
          ["cors", "npm:2.8.5"],\
          ["dotenv", "npm:8.2.0"],\
          ["eslint", "npm:8.18.0"],\
          ["eslint-plugin-prettier", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:4.0.0"],\
          ["express", "npm:4.17.1"],\
          ["helmet", "npm:4.3.1"],\
          ["inversify", "npm:6.0.1"],\
          ["inversify-express-utils", "npm:6.4.3"],\
          ["ioredis", "npm:5.0.6"],\
          ["jest", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:28.1.1"],\
          ["jsonwebtoken", "npm:8.5.1"],\
          ["mysql2", "npm:2.3.3"],\
          ["newrelic", "npm:8.6.0"],\
          ["nodemon", "npm:2.0.7"],\
          ["prettyjson", "npm:1.2.1"],\
          ["reflect-metadata", "npm:0.1.13"],\
          ["ts-jest", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:28.0.5"],\
          ["typeorm", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:0.3.6"],\
          ["ua-parser-js", "npm:1.0.2"],\
          ["uuid", "npm:8.3.2"],\
          ["winston", "npm:3.3.3"]\
        ],\
        "linkType": "SOFT"\
      }]\
    ]],\
    ["@standardnotes/time", [\
      ["npm:1.6.9", {\
        "packageLocation": "./.yarn/cache/@standardnotes-time-npm-1.6.9-cde9f7ae1e-e2cd345869.zip/node_modules/@standardnotes/time/",\
        "packageDependencies": [\
          ["@standardnotes/time", "npm:1.6.9"],\
          ["dayjs", "npm:1.11.3"],\
          ["microtime", "npm:3.1.0"],\
          ["reflect-metadata", "npm:0.1.13"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:1.7.0", {\
        "packageLocation": "./.yarn/cache/@standardnotes-time-npm-1.7.0-fa2b65b191-51b168d8a5.zip/node_modules/@standardnotes/time/",\
        "packageDependencies": [\
@@ -2935,6 +3018,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["@types/dotenv", [\
      ["npm:8.2.0", {\
        "packageLocation": "./.yarn/cache/@types-dotenv-npm-8.2.0-f4d0e3d65b-a1f524da7d.zip/node_modules/@types/dotenv/",\
        "packageDependencies": [\
          ["@types/dotenv", "npm:8.2.0"],\
          ["dotenv", "npm:16.0.1"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["@types/express", [\
      ["npm:4.17.13", {\
        "packageLocation": "./.yarn/cache/@types-express-npm-4.17.13-0e12fe9c24-12a2a0e6c4.zip/node_modules/@types/express/",\

@@ -2970,6 +3063,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["@types/inversify-express-utils", [\
      ["npm:2.0.0", {\
        "packageLocation": "./.yarn/cache/@types-inversify-express-utils-npm-2.0.0-e78182955d-848aa75f0f.zip/node_modules/@types/inversify-express-utils/",\
        "packageDependencies": [\
          ["@types/inversify-express-utils", "npm:2.0.0"],\
          ["inversify-express-utils", "npm:6.4.3"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["@types/ioredis", [\
      ["npm:4.28.10", {\
        "packageLocation": "./.yarn/cache/@types-ioredis-npm-4.28.10-4bdbe26a79-0f2788cf25.zip/node_modules/@types/ioredis/",\

@@ -3038,6 +3141,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["@types/jsonwebtoken", [\
      ["npm:8.5.8", {\
        "packageLocation": "./.yarn/cache/@types-jsonwebtoken-npm-8.5.8-798e14708c-56738a918c.zip/node_modules/@types/jsonwebtoken/",\
        "packageDependencies": [\
          ["@types/jsonwebtoken", "npm:8.5.8"],\
          ["@types/node", "npm:18.0.0"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["@types/keyv", [\
      ["npm:3.1.4", {\
        "packageLocation": "./.yarn/cache/@types-keyv-npm-3.1.4-a8082ea56b-e009a2bfb5.zip/node_modules/@types/keyv/",\

@@ -4126,6 +4239,21 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
      }]\
    ]],\
    ["boxen", [\
      ["npm:4.2.0", {\
        "packageLocation": "./.yarn/cache/boxen-npm-4.2.0-471e88ddba-ce2b565a2e.zip/node_modules/boxen/",\
        "packageDependencies": [\
          ["boxen", "npm:4.2.0"],\
          ["ansi-align", "npm:3.0.1"],\
          ["camelcase", "npm:5.3.1"],\
          ["chalk", "npm:3.0.0"],\
          ["cli-boxes", "npm:2.2.1"],\
          ["string-width", "npm:4.2.3"],\
          ["term-size", "npm:2.2.1"],\
          ["type-fest", "npm:0.8.1"],\
          ["widest-line", "npm:3.1.0"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:5.1.2", {\
        "packageLocation": "./.yarn/cache/boxen-npm-5.1.2-364ee34f2f-82d03e42a7.zip/node_modules/boxen/",\
        "packageDependencies": [\

@@ -4424,6 +4552,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:3.0.0", {\
        "packageLocation": "./.yarn/cache/chalk-npm-3.0.0-e813208025-8e3ddf3981.zip/node_modules/chalk/",\
        "packageDependencies": [\
          ["chalk", "npm:3.0.0"],\
          ["ansi-styles", "npm:4.3.0"],\
          ["supports-color", "npm:7.2.0"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:4.1.2", {\
        "packageLocation": "./.yarn/cache/chalk-npm-4.1.2-ba8b67ab80-fe75c9d5c7.zip/node_modules/chalk/",\
        "packageDependencies": [\

@@ -6576,6 +6713,14 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:2.1.0", {\
        "packageLocation": "./.yarn/cache/global-dirs-npm-2.1.0-790e02e61c-f80b74032c.zip/node_modules/global-dirs/",\
        "packageDependencies": [\
          ["global-dirs", "npm:2.1.0"],\
          ["ini", "npm:1.3.7"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:3.0.0", {\
        "packageLocation": "./.yarn/cache/global-dirs-npm-3.0.0-45faebeb68-953c17cf14.zip/node_modules/global-dirs/",\
        "packageDependencies": [\

@@ -6724,6 +6869,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["helmet", [\
      ["npm:4.3.1", {\
        "packageLocation": "./.yarn/cache/helmet-npm-4.3.1-22cd4b53d2-47f59d8b99.zip/node_modules/helmet/",\
        "packageDependencies": [\
          ["helmet", "npm:4.3.1"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["highlight.js", [\
      ["npm:10.7.3", {\
        "packageLocation": "./.yarn/cache/highlight.js-npm-10.7.3-247e67d5c0-defeafcd54.zip/node_modules/highlight.js/",\

@@ -7023,6 +7177,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
      }]\
    ]],\
    ["ini", [\
      ["npm:1.3.7", {\
        "packageLocation": "./.yarn/cache/ini-npm-1.3.7-188ee858c0-f8f3801e8e.zip/node_modules/ini/",\
        "packageDependencies": [\
          ["ini", "npm:1.3.7"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:1.3.8", {\
        "packageLocation": "./.yarn/cache/ini-npm-1.3.8-fb5040b4c0-dfd98b0ca3.zip/node_modules/ini/",\
        "packageDependencies": [\

@@ -7236,6 +7397,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
      }]\
    ]],\
    ["is-installed-globally", [\
      ["npm:0.3.2", {\
        "packageLocation": "./.yarn/cache/is-installed-globally-npm-0.3.2-a593acf078-7f7489ae30.zip/node_modules/is-installed-globally/",\
        "packageDependencies": [\
          ["is-installed-globally", "npm:0.3.2"],\
          ["global-dirs", "npm:2.1.0"],\
          ["is-path-inside", "npm:3.0.3"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:0.4.0", {\
        "packageLocation": "./.yarn/cache/is-installed-globally-npm-0.4.0-a30dd056c7-3359840d59.zip/node_modules/is-installed-globally/",\
        "packageDependencies": [\

@@ -7265,6 +7435,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
      }]\
    ]],\
    ["is-npm", [\
      ["npm:4.0.0", {\
        "packageLocation": "./.yarn/cache/is-npm-npm-4.0.0-86d312340b-c0d1550266.zip/node_modules/is-npm/",\
        "packageDependencies": [\
          ["is-npm", "npm:4.0.0"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:5.0.0", {\
        "packageLocation": "./.yarn/cache/is-npm-npm-5.0.0-2758bcd54b-9baff02b0c.zip/node_modules/is-npm/",\
        "packageDependencies": [\

@@ -9240,6 +9417,23 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
          ["update-notifier", "npm:5.1.0"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:2.0.7", {\
        "packageLocation": "./.yarn/unplugged/nodemon-npm-2.0.7-7b95e46511/node_modules/nodemon/",\
        "packageDependencies": [\
          ["nodemon", "npm:2.0.7"],\
          ["chokidar", "npm:3.5.3"],\
          ["debug", "virtual:f564cd587f82296d3fd6026dfab3e339413babae6e81b9c38de9addd7cd419ff4ad05c2c7d821d4792f5d97254f1f8a10edadcbab7fc3eef777350e5087c47c4#npm:3.2.7"],\
          ["ignore-by-default", "npm:1.0.1"],\
          ["minimatch", "npm:3.1.2"],\
          ["pstree.remy", "npm:1.1.8"],\
          ["semver", "npm:5.7.1"],\
          ["supports-color", "npm:5.5.0"],\
          ["touch", "npm:3.1.0"],\
          ["undefsafe", "npm:2.0.5"],\
          ["update-notifier", "npm:4.1.3"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["nopt", [\

@@ -11390,6 +11584,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
        "linkType": "HARD"\
      }]\
    ]],\
    ["term-size", [\
      ["npm:2.2.1", {\
        "packageLocation": "./.yarn/unplugged/term-size-npm-2.2.1-77ce7141d0/node_modules/term-size/",\
        "packageDependencies": [\
          ["term-size", "npm:2.2.1"]\
        ],\
        "linkType": "HARD"\
      }]\
    ]],\
    ["terminal-link", [\
      ["npm:2.1.1", {\
        "packageLocation": "./.yarn/cache/terminal-link-npm-2.1.1-de80341758-ce3d2cd3a4.zip/node_modules/terminal-link/",\

@@ -12130,6 +12333,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
      }]\
    ]],\
    ["update-notifier", [\
      ["npm:4.1.3", {\
        "packageLocation": "./.yarn/cache/update-notifier-npm-4.1.3-837e724aca-67652056e6.zip/node_modules/update-notifier/",\
        "packageDependencies": [\
          ["update-notifier", "npm:4.1.3"],\
          ["boxen", "npm:4.2.0"],\
          ["chalk", "npm:3.0.0"],\
          ["configstore", "npm:5.0.1"],\
          ["has-yarn", "npm:2.1.0"],\
          ["import-lazy", "npm:2.1.0"],\
          ["is-ci", "npm:2.0.0"],\
          ["is-installed-globally", "npm:0.3.2"],\
          ["is-npm", "npm:4.0.0"],\
          ["is-yarn-global", "npm:0.3.0"],\
          ["latest-version", "npm:5.1.0"],\
          ["pupa", "npm:2.1.1"],\
          ["semver-diff", "npm:3.1.1"],\
          ["xdg-basedir", "npm:4.0.0"]\
        ],\
        "linkType": "HARD"\
      }],\
      ["npm:5.1.0", {\
        "packageLocation": "./.yarn/cache/update-notifier-npm-5.1.0-6bf595ecee-461e5e5b00.zip/node_modules/update-notifier/",\
        "packageDependencies": [\
BIN .yarn/cache/@standardnotes-payloads-npm-1.5.1-45dffe2f5c-65c28421e7.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/@standardnotes-time-npm-1.6.9-cde9f7ae1e-e2cd345869.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/@types-dotenv-npm-8.2.0-f4d0e3d65b-a1f524da7d.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/@types-inversify-express-utils-npm-2.0.0-e78182955d-848aa75f0f.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/@types-jsonwebtoken-npm-8.5.8-798e14708c-56738a918c.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/boxen-npm-4.2.0-471e88ddba-ce2b565a2e.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/chalk-npm-3.0.0-e813208025-8e3ddf3981.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/global-dirs-npm-2.1.0-790e02e61c-f80b74032c.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/helmet-npm-4.3.1-22cd4b53d2-47f59d8b99.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/ini-npm-1.3.7-188ee858c0-f8f3801e8e.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/is-installed-globally-npm-0.3.2-a593acf078-7f7489ae30.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/is-npm-npm-4.0.0-86d312340b-c0d1550266.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/nodemon-npm-2.0.7-7b95e46511-d1af0b92e7.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/term-size-npm-2.2.1-77ce7141d0-1ed9813354.zip vendored Normal file
Binary file not shown.
BIN .yarn/cache/update-notifier-npm-4.1.3-837e724aca-67652056e6.zip vendored Normal file
Binary file not shown.
@@ -14,17 +14,22 @@
     "lint": "yarn workspaces foreach -p -j 10 --verbose run lint",
     "lint:auth": "yarn workspace @standardnotes/auth-server lint",
     "lint:scheduler": "yarn workspace @standardnotes/scheduler-server lint",
+    "lint:syncing-server": "yarn workspace @standardnotes/syncing-server lint",
     "test": "yarn workspaces foreach -p -j 10 --verbose run test",
     "test:auth": "yarn workspace @standardnotes/auth-server test",
     "test:scheduler": "yarn workspace @standardnotes/scheduler-server test",
+    "test:syncing-server": "yarn workspace @standardnotes/syncing-server test",
     "clean": "yarn workspaces foreach -p --verbose run clean",
     "setup:env": "yarn workspaces foreach -p --verbose run setup:env",
     "build": "yarn workspaces foreach -pt -j 10 --verbose run build",
     "build:auth": "yarn workspace @standardnotes/auth-server build",
     "build:scheduler": "yarn workspace @standardnotes/scheduler-server build",
+    "build:syncing-server": "yarn workspace @standardnotes/syncing-server build",
     "start:auth": "yarn workspace @standardnotes/auth-server start",
     "start:auth-worker": "yarn workspace @standardnotes/auth-server worker",
     "start:scheduler": "yarn workspace @standardnotes/scheduler-server worker",
+    "start:syncing-server": "yarn workspace @standardnotes/syncing-server start",
+    "start:syncing-server-worker": "yarn workspace @standardnotes/syncing-server worker",
     "release:beta": "lerna version --conventional-prerelease --conventional-commits --yes -m \"chore(release): publish\""
   },
   "devDependencies": {
@@ -58,7 +58,7 @@ export const AppDataSource = new DataSource({
     SubscriptionSetting,
     AnalyticsEntity,
   ],
-  migrations: [env.get('DB_MIGRATIONS_PATH')],
+  migrations: [env.get('DB_MIGRATIONS_PATH', true) ?? 'dist/migrations/*.js'],
   migrationsRun: true,
   logging: <LoggerOptions>env.get('DB_DEBUG_LEVEL'),
})
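The changed line passes a second argument to env.get and supplies a fallback, which suggests the flag marks a variable as optional, returning undefined instead of failing when unset. A minimal TypeScript sketch of such an accessor, assuming that behavior (the real implementation lives in src/Bootstrap/Env.ts and may differ):

export class Env {
  // load() (elided here) hydrates process.env, e.g. via dotenv.
  get(key: string, optional = false): string {
    const value = process.env[key]
    if (value === undefined && !optional) {
      throw new Error(`Environment variable ${key} is not set`)
    }

    return value as string
  }
}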
44 packages/syncing-server/.env.sample Normal file
@@ -0,0 +1,44 @@
LOG_LEVEL=info
NODE_ENV=development
VERSION=development

AUTH_JWT_SECRET=auth_jwt_secret

PORT=3000

DB_HOST=db
DB_REPLICA_HOST=db
DB_PORT=3306
DB_USERNAME=std_notes_user
DB_PASSWORD=changeme123
DB_DATABASE=standard_notes_db
DB_DEBUG_LEVEL=all # "all" | "query" | "schema" | "error" | "warn" | "info" | "log" | "migration"
DB_MIGRATIONS_PATH=dist/migrations/*.js

REDIS_URL=redis://cache

SNS_TOPIC_ARN=
SNS_AWS_REGION=
SQS_QUEUE_URL=
SQS_AWS_REGION=
S3_AWS_REGION=
S3_BACKUP_BUCKET_NAME=

REDIS_EVENTS_CHANNEL=events

INTERNAL_DNS_REROUTE_ENABLED=false
EXTENSIONS_SERVER_URL=http://extensions-server:3004
AUTH_SERVER_URL=http://auth:3000

EMAIL_ATTACHMENT_MAX_BYTE_SIZE=10485760

REVISIONS_FREQUENCY=300

# (Optional) New Relic Setup
NEW_RELIC_ENABLED=false
NEW_RELIC_APP_NAME="Syncing Server JS"
NEW_RELIC_LICENSE_KEY=
NEW_RELIC_NO_CONFIG_FILE=true
NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=false
NEW_RELIC_LOG_ENABLED=false
NEW_RELIC_LOG_LEVEL=info
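These sample values are read into process.env at startup; dotenv is a declared dependency of the package, so a minimal load looks like this (illustrative TypeScript sketch, the actual bootstrap goes through src/Bootstrap/Env.ts):

import { config } from 'dotenv'

// Populate process.env from .env; dotenv does not override variables that
// are already set in the surrounding environment.
config()

console.log(process.env.DB_HOST) // 'db' with the sample file above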
3 packages/syncing-server/.eslintignore Normal file
@@ -0,0 +1,3 @@
dist
test-setup.ts
data
6 packages/syncing-server/.eslintrc Normal file
@@ -0,0 +1,6 @@
{
  "extends": "../../.eslintrc",
  "parserOptions": {
    "project": "./linter.tsconfig.json"
  }
}
25 packages/syncing-server/Dockerfile Normal file
@@ -0,0 +1,25 @@
FROM node:16.15.1-alpine AS builder

# Install dependencies for building native libraries
RUN apk add --update git openssh-client python3 alpine-sdk

WORKDIR /workspace

# docker-build plugin copies everything needed for `yarn install` to `manifests` folder.
COPY manifests ./

RUN yarn install --immutable

FROM node:16.15.1-alpine

WORKDIR /workspace

# Copy the installed dependencies from the previous stage.
COPY --from=builder /workspace ./

# docker-build plugin runs `yarn pack` in all workspace dependencies and copies them to `packs` folder.
COPY packs ./

ENTRYPOINT [ "/workspace/packages/syncing-server/docker/entrypoint.sh" ]

CMD [ "start-web" ]
94 packages/syncing-server/bin/server.ts Normal file
@@ -0,0 +1,94 @@
import 'reflect-metadata'

import 'newrelic'

import * as Sentry from '@sentry/node'

import '../src/Controller/HealthCheckController'
import '../src/Controller/RevisionsController'
import '../src/Controller/ItemsController'

import * as helmet from 'helmet'
import * as cors from 'cors'
import { urlencoded, json, Request, Response, NextFunction, RequestHandler, ErrorRequestHandler } from 'express'
import * as winston from 'winston'

import { InversifyExpressServer } from 'inversify-express-utils'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'

const container = new ContainerConfigLoader()
void container.load().then((container) => {
  const env: Env = new Env()
  env.load()

  const server = new InversifyExpressServer(container)

  server.setConfig((app) => {
    app.use((_request: Request, response: Response, next: NextFunction) => {
      response.setHeader('X-SSJS-Version', container.get(TYPES.VERSION))
      next()
    })
    /* eslint-disable */
    app.use(helmet({
      contentSecurityPolicy: {
        directives: {
          defaultSrc: ["https: 'self'"],
          baseUri: ["'self'"],
          childSrc: ["*", "blob:"],
          connectSrc: ["*"],
          fontSrc: ["*", "'self'"],
          formAction: ["'self'"],
          frameAncestors: ["*", "*.standardnotes.org"],
          frameSrc: ["*", "blob:"],
          imgSrc: ["'self'", "*", "data:"],
          manifestSrc: ["'self'"],
          mediaSrc: ["'self'"],
          objectSrc: ["'self'"],
          scriptSrc: ["'self'"],
          styleSrc: ["'self'"]
        }
      }
    }))
    /* eslint-enable */
    app.use(json({ limit: '50mb' }))
    app.use(urlencoded({ extended: true, limit: '50mb', parameterLimit: 5000 }))
    app.use(cors())

    if (env.get('SENTRY_DSN', true)) {
      Sentry.init({
        dsn: env.get('SENTRY_DSN'),
        integrations: [new Sentry.Integrations.Http({ tracing: false, breadcrumbs: true })],
        tracesSampleRate: 0,
      })

      app.use(Sentry.Handlers.requestHandler() as RequestHandler)
    }
  })

  const logger: winston.Logger = container.get(TYPES.Logger)

  server.setErrorConfig((app) => {
    if (env.get('SENTRY_DSN', true)) {
      app.use(Sentry.Handlers.errorHandler() as ErrorRequestHandler)
    }

    app.use((error: Record<string, unknown>, _request: Request, response: Response, _next: NextFunction) => {
      logger.error(error.stack)

      response.status(500).send({
        error: {
          message:
            "Unfortunately, we couldn't handle your request. Please try again or contact our support if the error persists.",
        },
      })
    })
  })

  const serverInstance = server.build()

  serverInstance.listen(env.get('PORT'))

  logger.info(`Server started on port ${process.env.PORT}`)
})
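Every response from the server above carries an X-SSJS-Version header set from the container's VERSION binding. A client-side check could look like this: hypothetical TypeScript using axios (already a dependency of the package); the /healthcheck route name is assumed from HealthCheckController and is not confirmed by this diff:

import axios from 'axios'

const checkVersion = async (): Promise<void> => {
  const response = await axios.get('http://localhost:3000/healthcheck')
  // Header names are lowercased by axios.
  console.log(response.headers['x-ssjs-version'])
}

void checkVersion()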
25 packages/syncing-server/bin/worker.ts Normal file
@@ -0,0 +1,25 @@
import 'reflect-metadata'

import 'newrelic'

import { Logger } from 'winston'

import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'
import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events'

const container = new ContainerConfigLoader()
void container.load().then((container) => {
  const env: Env = new Env()
  env.load()

  const logger: Logger = container.get(TYPES.Logger)

  logger.info('Starting worker...')

  const subscriberFactory: DomainEventSubscriberFactoryInterface = container.get(TYPES.DomainEventSubscriberFactory)
  subscriberFactory.create().start()

  setInterval(() => logger.info('Alive and kicking!'), 20 * 60 * 1000)
})
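The subscriber created above dispatches incoming domain events to handlers bound in the container (ItemsSyncedEventHandler and friends, see Container.ts below). A minimal handler shape, assuming DomainEventHandlerInterface from @standardnotes/domain-events exposes a single handle method:

import { DomainEventHandlerInterface, DomainEventInterface } from '@standardnotes/domain-events'

// Illustrative no-op handler; real handlers receive typed events such as ITEMS_SYNCED.
export class ExampleEventHandler implements DomainEventHandlerInterface {
  async handle(event: DomainEventInterface): Promise<void> {
    console.log(`handling event of type ${event.type}`)
  }
}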
27 packages/syncing-server/docker/entrypoint.sh Executable file
@@ -0,0 +1,27 @@
#!/bin/sh
set -e

COMMAND=$1 && shift 1

case "$COMMAND" in
  'start-local')
    echo "Starting Web in Local Mode..."
    yarn workspace @standardnotes/syncing-server start:local
    ;;

  'start-web' )
    echo "Starting Web..."
    yarn workspace @standardnotes/syncing-server start
    ;;

  'start-worker' )
    echo "Starting Worker..."
    yarn workspace @standardnotes/syncing-server worker
    ;;

  * )
    echo "Unknown command"
    ;;
esac

exec "$@"
18 packages/syncing-server/jest.config.js Normal file
@@ -0,0 +1,18 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires
const base = require('../../jest.config');

module.exports = {
  ...base,
  globals: {
    'ts-jest': {
      tsconfig: 'tsconfig.json',
    },
  },
  coveragePathIgnorePatterns: [
    '/Bootstrap/',
    'HealthCheckController'
  ],
  setupFilesAfterEnv: [
    './test-setup.ts'
  ]
};
4 packages/syncing-server/linter.tsconfig.json Normal file
@@ -0,0 +1,4 @@
{
  "extends": "./tsconfig.json",
  "exclude": ["dist", "test-setup.ts"]
}
@@ -0,0 +1,50 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class initDatabase1606470249552 implements MigrationInterface {
  name = 'initDatabase1606470249552'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await this.fixUpdatedAtTimestampsFromLegacyMigration(queryRunner)

    await queryRunner.query(
      'CREATE TABLE IF NOT EXISTS `items` (`uuid` varchar(36) NOT NULL, `duplicate_of` varchar(36) NULL, `items_key_id` varchar(255) NULL, `content` mediumtext NULL, `content_type` varchar(255) NULL, `enc_item_key` text NULL, `auth_hash` varchar(255) NULL, `user_uuid` varchar(36) NULL, `deleted` tinyint(1) NULL DEFAULT 0, `last_user_agent` text NULL, `created_at` datetime(6) NOT NULL, `updated_at` datetime(6) NOT NULL, `created_at_timestamp` BIGINT NOT NULL, `updated_at_timestamp` BIGINT NOT NULL, INDEX `index_items_on_content_type` (`content_type`), INDEX `index_items_on_user_uuid` (`user_uuid`), INDEX `index_items_on_deleted` (`deleted`), INDEX `updated_at_timestamp` (`updated_at_timestamp`), INDEX `index_items_on_updated_at` (`updated_at`), INDEX `user_uuid_and_updated_at_timestamp_and_created_at_timestamp` (`user_uuid`, `updated_at_timestamp`, `created_at_timestamp`), INDEX `index_items_on_user_uuid_and_updated_at_and_created_at` (`user_uuid`, `updated_at`, `created_at`), INDEX `index_items_on_user_uuid_and_content_type` (`user_uuid`, `content_type`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
    )
    await queryRunner.query(
      'CREATE TABLE IF NOT EXISTS `revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NULL, `content` mediumtext NULL, `content_type` varchar(255) NULL, `items_key_id` varchar(255) NULL, `enc_item_key` text NULL, `auth_hash` varchar(255) NULL, `creation_date` date NULL, `created_at` datetime(6) NULL, `updated_at` datetime(6) NULL, INDEX `index_revisions_on_item_uuid` (`item_uuid`), INDEX `index_revisions_on_creation_date` (`creation_date`), INDEX `index_revisions_on_created_at` (`created_at`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
    )
    await queryRunner.query(
      'CREATE TABLE IF NOT EXISTS `item_revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `revision_uuid` varchar(36) NOT NULL, INDEX `index_item_revisions_on_item_uuid` (`item_uuid`), INDEX `index_item_revisions_on_revision_uuid` (`revision_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
    )
  }

  public async down(_queryRunner: QueryRunner): Promise<void> {
    return
  }

  private async fixUpdatedAtTimestampsFromLegacyMigration(queryRunner: QueryRunner): Promise<void> {
    const itemsTableExistsQueryResult = await queryRunner.manager.query(
      'SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = "items"',
    )
    const itemsTableExists = itemsTableExistsQueryResult[0].count === 1
    if (!itemsTableExists) {
      return
    }

    const updatedAtTimestampColumnExistsQueryResult = await queryRunner.manager.query(
      'SELECT COUNT(*) as count FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = "items" AND column_name = "updated_at_timestamp"',
    )
    const updatedAtTimestampColumnExists = updatedAtTimestampColumnExistsQueryResult[0].count === 1
    if (updatedAtTimestampColumnExists) {
      return
    }

    await queryRunner.query('ALTER TABLE `items` ADD COLUMN `updated_at_timestamp` BIGINT NOT NULL')
    await queryRunner.query('ALTER TABLE `items` ADD COLUMN `created_at_timestamp` BIGINT NOT NULL')
    await queryRunner.query(
      'ALTER TABLE `items` ADD INDEX `user_uuid_and_updated_at_timestamp_and_created_at_timestamp` (`user_uuid`, `updated_at_timestamp`, `created_at_timestamp`)',
    )
    await queryRunner.query('ALTER TABLE `items` ADD INDEX `updated_at_timestamp` (`updated_at_timestamp`)')
    await queryRunner.query('UPDATE `items` SET `created_at_timestamp` = UNIX_TIMESTAMP(`created_at`) * 1000000')
    await queryRunner.query('UPDATE `items` SET `updated_at_timestamp` = UNIX_TIMESTAMP(`updated_at`) * 1000000')
  }
}
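The legacy backfill above stores UNIX_TIMESTAMP(...) * 1000000 into the new BIGINT columns, i.e. timestamps with microsecond resolution. The same conversion in TypeScript (hypothetical helper, not part of the commit):

// UNIX_TIMESTAMP() truncates to whole seconds, so the backfill deliberately
// drops the sub-second part of the datetime(6) columns; new writes can be exact.
const toMicroseconds = (date: Date): number => Math.floor(date.getTime() / 1000) * 1_000_000

console.log(toMicroseconds(new Date('2020-11-27T00:00:00Z'))) // 1606435200000000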
@@ -0,0 +1,15 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class addExtensionSettings1617615657558 implements MigrationInterface {
  name = 'addExtensionSettings1617615657558'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      'CREATE TABLE IF NOT EXISTS `extension_settings` (`uuid` varchar(36) NOT NULL, `extension_id` varchar(255) NULL, `mute_emails` tinyint(1) NULL DEFAULT 0, `created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `updated_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, INDEX `index_extension_settings_on_extension_id` (`extension_id`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
    )
  }

  public async down(_queryRunner: QueryRunner): Promise<void> {
    return
  }
}

@@ -0,0 +1,27 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class dropUnusedIndexes1629964808297 implements MigrationInterface {
  name = 'dropUnusedIndexes1629964808297'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const indexItemsOnUserAndTimestamp = await queryRunner.manager.query(
      'SHOW INDEX FROM `items` where `key_name` = "index_items_on_user_uuid_and_updated_at_and_created_at"',
    )
    const indexItemsOnUserAndTimestampExists = indexItemsOnUserAndTimestamp && indexItemsOnUserAndTimestamp.length > 0
    if (indexItemsOnUserAndTimestampExists) {
      await queryRunner.query('ALTER TABLE `items` DROP INDEX index_items_on_user_uuid_and_updated_at_and_created_at')
    }

    const indexItemsOnUpdatedAt = await queryRunner.manager.query(
      'SHOW INDEX FROM `items` where `key_name` = "index_items_on_updated_at"',
    )
    const indexItemsOnUpdatedAtExists = indexItemsOnUpdatedAt && indexItemsOnUpdatedAt.length > 0
    if (indexItemsOnUpdatedAtExists) {
      await queryRunner.query('ALTER TABLE `items` DROP INDEX index_items_on_updated_at')
    }
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class refactorCalculatingIntegrityHash1630318893601 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `items` ADD INDEX `user_uuid_and_deleted` (`user_uuid`, `deleted`)')
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,12 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class restrictContentType1630417724617 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('UPDATE `items` SET content_type = "Unknown" WHERE `content_type` IS NULL')
    await queryRunner.query('ALTER TABLE `items` CHANGE `content_type` `content_type` varchar(255) NOT NULL')
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,26 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

import { v4 } from 'uuid'

export class addRevisionForDuplicatedItems1631529502150 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    const itemRevisions = await queryRunner.manager.query(
      'SELECT r.uuid as originalRevisionUuid, ir.item_uuid as properItemUuid, ir.uuid as relationUuid FROM revisions r INNER JOIN item_revisions ir ON ir.revision_uuid = r.uuid AND ir.item_uuid <> r.item_uuid',
    )

    for (const itemRevision of itemRevisions) {
      const revisionUuid = v4()

      await queryRunner.manager.query(
        `INSERT INTO revisions (uuid, item_uuid, content, content_type, items_key_id, enc_item_key, auth_hash, creation_date, created_at, updated_at) SELECT "${revisionUuid}", "${itemRevision['properItemUuid']}", content, content_type, items_key_id, enc_item_key, auth_hash, creation_date, created_at, updated_at FROM revisions WHERE uuid = "${itemRevision['originalRevisionUuid']}"`,
      )
      await queryRunner.manager.query(
        `UPDATE item_revisions SET revision_uuid = "${revisionUuid}" WHERE uuid = "${itemRevision['relationUuid']}"`,
      )
    }
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class dropItemRevisionsJoiningTable1631530260504 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('DROP TABLE `item_revisions`')
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      'CREATE TABLE `item_revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `revision_uuid` varchar(36) NOT NULL, INDEX `index_item_revisions_on_item_uuid` (`item_uuid`), INDEX `index_item_revisions_on_revision_uuid` (`revision_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
    )
  }
}
@@ -0,0 +1,36 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class cleanupOrphanItemsAndRevisions1632219307742 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    const usersTableExistsQueryResult = await queryRunner.manager.query(
      'SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = "users"',
    )
    const usersTableExists = usersTableExistsQueryResult[0].count === 1
    if (usersTableExists) {
      const orphanedItems = await queryRunner.manager.query(
        'SELECT i.uuid as uuid FROM items i LEFT JOIN users u ON i.user_uuid = u.uuid WHERE u.uuid IS NULL',
      )

      for (const orphanedItem of orphanedItems) {
        await queryRunner.manager.query(`DELETE FROM revisions WHERE item_uuid = "${orphanedItem['uuid']}"`)
        await queryRunner.manager.query(`DELETE FROM items WHERE uuid = "${orphanedItem['uuid']}"`)
      }
    }

    await queryRunner.manager.query('DELETE FROM items WHERE user_uuid IS NULL')

    const orphanedRevisions = await queryRunner.manager.query(
      'SELECT r.uuid as uuid FROM revisions r LEFT JOIN items i ON r.item_uuid = i.uuid WHERE i.uuid IS NULL',
    )

    for (const orphanedRevision of orphanedRevisions) {
      await queryRunner.manager.query(`DELETE FROM revisions WHERE uuid = "${orphanedRevision['uuid']}"`)
    }

    await queryRunner.manager.query('DELETE FROM revisions WHERE item_uuid IS NULL')
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,28 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class addRevisionsItemsRelation1632221263106 implements MigrationInterface {
  name = 'addRevisionsItemsRelation1632221263106'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const indexRevisionsOnItemUuid = await queryRunner.manager.query(
      'SHOW INDEX FROM `revisions` where `key_name` = "index_revisions_on_item_uuid"',
    )
    const indexRevisionsOnItemUuidExists = indexRevisionsOnItemUuid && indexRevisionsOnItemUuid.length > 0
    if (indexRevisionsOnItemUuidExists) {
      await queryRunner.query('DROP INDEX `index_revisions_on_item_uuid` ON `revisions`')
    }

    await queryRunner.query('ALTER TABLE `revisions` CHANGE `item_uuid` `item_uuid` varchar(36) NOT NULL')
    await queryRunner.query('ALTER TABLE `items` CHANGE `user_uuid` `user_uuid` varchar(36) NOT NULL')
    await queryRunner.query(
      'ALTER TABLE `revisions` ADD CONSTRAINT `FK_ab3b92e54701fe3010022a31d90` FOREIGN KEY (`item_uuid`) REFERENCES `items`(`uuid`) ON DELETE CASCADE ON UPDATE NO ACTION',
    )
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `revisions` DROP FOREIGN KEY `FK_ab3b92e54701fe3010022a31d90`')
    await queryRunner.query('ALTER TABLE `items` CHANGE `user_uuid` `user_uuid` varchar(36) NULL')
    await queryRunner.query('ALTER TABLE `revisions` CHANGE `item_uuid` `item_uuid` varchar(36) NULL')
    await queryRunner.query('CREATE INDEX `index_revisions_on_item_uuid` ON `revisions` (`item_uuid`)')
  }
}

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class addItemContentSize1637738491169 implements MigrationInterface {
  name = 'addItemContentSize1637738491169'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `items` ADD `content_size` INT UNSIGNED NULL')
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `items` DROP COLUMN `content_size`')
  }
}

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class removeExtensionSettings1639134926025 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('DROP TABLE `extension_settings`')
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class removeSfExtensionItems1642073387521 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.manager.query('DELETE FROM items WHERE content_type = "SF|Extension"')
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class removeUserAgent1647501696205 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `items` DROP COLUMN `last_user_agent`')
  }

  public async down(): Promise<void> {
    return
  }
}

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

export class addUpdatedWithSession1654518291191 implements MigrationInterface {
  name = 'addUpdatedWithSession1654518291191'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `items` ADD `updated_with_session` varchar(36) NULL')
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('ALTER TABLE `items` DROP COLUMN `updated_with_session`')
  }
}
74 packages/syncing-server/package.json Normal file
@@ -0,0 +1,74 @@
{
  "name": "@standardnotes/syncing-server",
  "version": "1.0.0",
  "engines": {
    "node": ">=16.0.0 <17.0.0"
  },
  "description": "Syncing Server",
  "main": "dist/src/index.js",
  "typings": "dist/src/index.d.ts",
  "repository": "git@github.com:standardnotes/syncing-server-js.git",
  "author": "Karol Sójko <karolsojko@standardnotes.com>",
  "license": "AGPL-3.0-or-later",
  "scripts": {
    "clean": "rm -fr dist",
    "prebuild": "yarn clean",
    "build": "tsc --rootDir ./",
    "lint": "eslint . --ext .ts",
    "pretest": "yarn lint && yarn build",
    "test": "jest --coverage --config=./jest.config.js --maxWorkers=50%",
    "start": "yarn node dist/bin/server.js",
    "worker": "yarn node dist/bin/worker.js"
  },
  "dependencies": {
    "@newrelic/native-metrics": "7.0.2",
    "@newrelic/winston-enricher": "^2.1.0",
    "@sentry/node": "^6.16.1",
    "@standardnotes/analytics": "^1.6.0",
    "@standardnotes/auth": "^3.19.2",
    "@standardnotes/common": "^1.22.0",
    "@standardnotes/domain-events": "^2.32.2",
    "@standardnotes/domain-events-infra": "^1.5.2",
    "@standardnotes/payloads": "^1.5.1",
    "@standardnotes/responses": "^1.6.15",
    "@standardnotes/settings": "1.14.3",
    "@standardnotes/time": "1.6.9",
    "aws-sdk": "^2.1159.0",
    "axios": "0.24.0",
    "cors": "2.8.5",
    "dotenv": "8.2.0",
    "express": "4.17.1",
    "helmet": "4.3.1",
    "inversify": "^6.0.1",
    "inversify-express-utils": "^6.4.3",
    "ioredis": "^5.0.6",
    "jsonwebtoken": "8.5.1",
    "mysql2": "^2.3.3",
    "newrelic": "8.6.0",
    "nodemon": "2.0.7",
    "prettyjson": "1.2.1",
    "reflect-metadata": "0.1.13",
    "typeorm": "^0.3.6",
    "ua-parser-js": "1.0.2",
    "uuid": "8.3.2",
    "winston": "3.3.3"
  },
  "devDependencies": {
    "@types/cors": "^2.8.9",
    "@types/dotenv": "^8.2.0",
    "@types/express": "^4.17.9",
    "@types/inversify-express-utils": "^2.0.0",
    "@types/ioredis": "^4.28.10",
    "@types/jest": "^28.1.3",
    "@types/jsonwebtoken": "^8.5.0",
    "@types/newrelic": "^7.0.2",
    "@types/prettyjson": "^0.0.29",
    "@types/ua-parser-js": "^0.7.36",
    "@types/uuid": "^8.3.0",
    "@typescript-eslint/eslint-plugin": "^5.29.0",
    "eslint": "^8.14.0",
    "eslint-plugin-prettier": "^4.0.0",
    "jest": "^28.1.1",
    "ts-jest": "^28.0.1"
  }
}
317 packages/syncing-server/src/Bootstrap/Container.ts Normal file
@@ -0,0 +1,317 @@
import * as winston from 'winston'
import Redis from 'ioredis'
import * as AWS from 'aws-sdk'
import { Container } from 'inversify'
import {
  DomainEventHandlerInterface,
  DomainEventMessageHandlerInterface,
  DomainEventSubscriberFactoryInterface,
} from '@standardnotes/domain-events'
import {
  AnalyticsStoreInterface,
  PeriodKeyGenerator,
  RedisAnalyticsStore,
  RedisStatisticsStore,
  StatisticsStoreInterface,
} from '@standardnotes/analytics'

import { Env } from './Env'
import TYPES from './Types'
import { AuthMiddleware } from '../Controller/AuthMiddleware'
import { MySQLRevisionRepository } from '../Infra/MySQL/MySQLRevisionRepository'
import { Item } from '../Domain/Item/Item'
import { Revision } from '../Domain/Revision/Revision'
import { RevisionProjector } from '../Projection/RevisionProjector'
import { MySQLItemRepository } from '../Infra/MySQL/MySQLItemRepository'
import { ContentDecoder } from '../Domain/Item/ContentDecoder'
import { DomainEventFactory } from '../Domain/Event/DomainEventFactory'
import { SyncResponseFactory20161215 } from '../Domain/Item/SyncResponse/SyncResponseFactory20161215'
import { SyncResponseFactory20200115 } from '../Domain/Item/SyncResponse/SyncResponseFactory20200115'
import { SyncResponseFactoryResolverInterface } from '../Domain/Item/SyncResponse/SyncResponseFactoryResolverInterface'
import { SyncResponseFactoryResolver } from '../Domain/Item/SyncResponse/SyncResponseFactoryResolver'
import { ItemServiceInterface } from '../Domain/Item/ItemServiceInterface'
import { ItemService } from '../Domain/Item/ItemService'
import { AuthHttpServiceInterface } from '../Domain/Auth/AuthHttpServiceInterface'
import { AuthHttpService } from '../Infra/HTTP/AuthHttpService'
import { SyncItems } from '../Domain/UseCase/SyncItems'
import { ExtensionsHttpServiceInterface } from '../Domain/Extension/ExtensionsHttpServiceInterface'
import { ExtensionsHttpService } from '../Domain/Extension/ExtensionsHttpService'
import { ItemBackupServiceInterface } from '../Domain/Item/ItemBackupServiceInterface'
import { S3ItemBackupService } from '../Infra/S3/S3ItemBackupService'
import { DomainEventFactoryInterface } from '../Domain/Event/DomainEventFactoryInterface'
import { ItemsSyncedEventHandler } from '../Domain/Handler/ItemsSyncedEventHandler'
import { EmailArchiveExtensionSyncedEventHandler } from '../Domain/Handler/EmailArchiveExtensionSyncedEventHandler'
import { RevisionServiceInterface } from '../Domain/Revision/RevisionServiceInterface'
import { RevisionService } from '../Domain/Revision/RevisionService'
import { DuplicateItemSyncedEventHandler } from '../Domain/Handler/DuplicateItemSyncedEventHandler'
import { AccountDeletionRequestedEventHandler } from '../Domain/Handler/AccountDeletionRequestedEventHandler'
import { ItemProjector } from '../Projection/ItemProjector'
import { ItemConflictProjector } from '../Projection/ItemConflictProjector'
import { Timer, TimerInterface } from '@standardnotes/time'
import { ItemSaveValidatorInterface } from '../Domain/Item/SaveValidator/ItemSaveValidatorInterface'
import { ItemSaveValidator } from '../Domain/Item/SaveValidator/ItemSaveValidator'
import { OwnershipFilter } from '../Domain/Item/SaveRule/OwnershipFilter'
import { TimeDifferenceFilter } from '../Domain/Item/SaveRule/TimeDifferenceFilter'
import { ItemFactoryInterface } from '../Domain/Item/ItemFactoryInterface'
import { ItemFactory } from '../Domain/Item/ItemFactory'
import axios, { AxiosInstance } from 'axios'
import { UuidFilter } from '../Domain/Item/SaveRule/UuidFilter'
import { ContentTypeFilter } from '../Domain/Item/SaveRule/ContentTypeFilter'
import { ContentFilter } from '../Domain/Item/SaveRule/ContentFilter'
import {
  RedisDomainEventPublisher,
  RedisDomainEventSubscriberFactory,
  RedisEventMessageHandler,
  SNSDomainEventPublisher,
  SQSDomainEventSubscriberFactory,
  SQSEventMessageHandler,
  SQSNewRelicEventMessageHandler,
} from '@standardnotes/domain-events-infra'
import { EmailBackupRequestedEventHandler } from '../Domain/Handler/EmailBackupRequestedEventHandler'
import { CloudBackupRequestedEventHandler } from '../Domain/Handler/CloudBackupRequestedEventHandler'
import { CheckIntegrity } from '../Domain/UseCase/CheckIntegrity/CheckIntegrity'
import { GetItem } from '../Domain/UseCase/GetItem/GetItem'
import { ItemTransferCalculatorInterface } from '../Domain/Item/ItemTransferCalculatorInterface'
import { ItemTransferCalculator } from '../Domain/Item/ItemTransferCalculator'
import { ProjectorInterface } from '../Projection/ProjectorInterface'
import { SavedItemProjection } from '../Projection/SavedItemProjection'
import { SavedItemProjector } from '../Projection/SavedItemProjector'
import { ItemProjection } from '../Projection/ItemProjection'
import { RevisionProjection } from '../Projection/RevisionProjection'
import { ItemConflict } from '../Domain/Item/ItemConflict'
import { ItemConflictProjection } from '../Projection/ItemConflictProjection'
import { AppDataSource } from './DataSource'
import { RevisionRepositoryInterface } from '../Domain/Revision/RevisionRepositoryInterface'
import { ItemRepositoryInterface } from '../Domain/Item/ItemRepositoryInterface'
import { Repository } from 'typeorm'

// eslint-disable-next-line @typescript-eslint/no-var-requires
const newrelicWinstonEnricher = require('@newrelic/winston-enricher')

export class ContainerConfigLoader {
  private readonly DEFAULT_CONTENT_SIZE_TRANSFER_LIMIT = 10_000_000

  async load(): Promise<Container> {
    const env: Env = new Env()
    env.load()

    const container = new Container()

    await AppDataSource.initialize()

    const redisUrl = env.get('REDIS_URL')
    const isRedisInClusterMode = redisUrl.indexOf(',') > 0
    let redis
    if (isRedisInClusterMode) {
      redis = new Redis.Cluster(redisUrl.split(','))
    } else {
      redis = new Redis(redisUrl)
    }

    container.bind(TYPES.Redis).toConstantValue(redis)

    const winstonFormatters = [winston.format.splat(), winston.format.json()]
    if (env.get('NEW_RELIC_ENABLED', true) === 'true') {
      winstonFormatters.push(newrelicWinstonEnricher())
    }

    const logger = winston.createLogger({
      level: env.get('LOG_LEVEL') || 'info',
      format: winston.format.combine(...winstonFormatters),
      transports: [new winston.transports.Console({ level: env.get('LOG_LEVEL') || 'info' })],
    })
    container.bind<winston.Logger>(TYPES.Logger).toConstantValue(logger)
|
||||
|
||||
if (env.get('SNS_AWS_REGION', true)) {
|
||||
container.bind<AWS.SNS>(TYPES.SNS).toConstantValue(
|
||||
new AWS.SNS({
|
||||
apiVersion: 'latest',
|
||||
region: env.get('SNS_AWS_REGION', true),
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
if (env.get('SQS_AWS_REGION', true)) {
|
||||
container.bind<AWS.SQS>(TYPES.SQS).toConstantValue(
|
||||
new AWS.SQS({
|
||||
apiVersion: 'latest',
|
||||
region: env.get('SQS_AWS_REGION', true),
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
let s3Client = undefined
|
||||
if (env.get('S3_AWS_REGION', true)) {
|
||||
s3Client = new AWS.S3({
|
||||
apiVersion: 'latest',
|
||||
region: env.get('S3_AWS_REGION', true),
|
||||
})
|
||||
}
|
||||
container.bind<AWS.S3 | undefined>(TYPES.S3).toConstantValue(s3Client)
|
||||
|
||||
// Repositories
|
||||
container.bind<RevisionRepositoryInterface>(TYPES.RevisionRepository).to(MySQLRevisionRepository)
|
||||
container.bind<ItemRepositoryInterface>(TYPES.ItemRepository).to(MySQLItemRepository)
|
||||
|
||||
// ORM
|
||||
container
|
||||
.bind<Repository<Revision>>(TYPES.ORMRevisionRepository)
|
||||
.toConstantValue(AppDataSource.getRepository(Revision))
|
||||
container.bind<Repository<Item>>(TYPES.ORMItemRepository).toConstantValue(AppDataSource.getRepository(Item))
|
||||
|
||||
// Middleware
|
||||
container.bind<AuthMiddleware>(TYPES.AuthMiddleware).to(AuthMiddleware)
|
||||
|
||||
// Projectors
|
||||
container.bind<ProjectorInterface<Revision, RevisionProjection>>(TYPES.RevisionProjector).to(RevisionProjector)
|
||||
container.bind<ProjectorInterface<Item, ItemProjection>>(TYPES.ItemProjector).to(ItemProjector)
|
||||
container.bind<ProjectorInterface<Item, SavedItemProjection>>(TYPES.SavedItemProjector).to(SavedItemProjector)
|
||||
container
|
||||
.bind<ProjectorInterface<ItemConflict, ItemConflictProjection>>(TYPES.ItemConflictProjector)
|
||||
.to(ItemConflictProjector)
|
||||
|
||||
// env vars
|
||||
container.bind(TYPES.REDIS_URL).toConstantValue(env.get('REDIS_URL'))
|
||||
container.bind(TYPES.SNS_TOPIC_ARN).toConstantValue(env.get('SNS_TOPIC_ARN', true))
|
||||
container.bind(TYPES.SNS_AWS_REGION).toConstantValue(env.get('SNS_AWS_REGION', true))
|
||||
container.bind(TYPES.SQS_QUEUE_URL).toConstantValue(env.get('SQS_QUEUE_URL', true))
|
||||
container.bind(TYPES.REDIS_EVENTS_CHANNEL).toConstantValue(env.get('REDIS_EVENTS_CHANNEL'))
|
||||
container.bind(TYPES.AUTH_JWT_SECRET).toConstantValue(env.get('AUTH_JWT_SECRET'))
|
||||
container
|
||||
.bind(TYPES.INTERNAL_DNS_REROUTE_ENABLED)
|
||||
.toConstantValue(env.get('INTERNAL_DNS_REROUTE_ENABLED', true) === 'true')
|
||||
container.bind(TYPES.EXTENSIONS_SERVER_URL).toConstantValue(env.get('EXTENSIONS_SERVER_URL', true))
|
||||
container.bind(TYPES.AUTH_SERVER_URL).toConstantValue(env.get('AUTH_SERVER_URL'))
|
||||
container.bind(TYPES.S3_AWS_REGION).toConstantValue(env.get('S3_AWS_REGION', true))
|
||||
container.bind(TYPES.S3_BACKUP_BUCKET_NAME).toConstantValue(env.get('S3_BACKUP_BUCKET_NAME', true))
|
||||
container.bind(TYPES.EMAIL_ATTACHMENT_MAX_BYTE_SIZE).toConstantValue(env.get('EMAIL_ATTACHMENT_MAX_BYTE_SIZE'))
|
||||
container.bind(TYPES.REVISIONS_FREQUENCY).toConstantValue(env.get('REVISIONS_FREQUENCY'))
|
||||
container.bind(TYPES.NEW_RELIC_ENABLED).toConstantValue(env.get('NEW_RELIC_ENABLED', true))
|
||||
container.bind(TYPES.VERSION).toConstantValue(env.get('VERSION'))
|
||||
container
|
||||
.bind(TYPES.CONTENT_SIZE_TRANSFER_LIMIT)
|
||||
.toConstantValue(env.get('CONTENT_SIZE_TRANSFER_LIMIT', true) ?? this.DEFAULT_CONTENT_SIZE_TRANSFER_LIMIT)
|
||||
|
||||
// use cases
|
||||
container.bind<SyncItems>(TYPES.SyncItems).to(SyncItems)
|
||||
container.bind<CheckIntegrity>(TYPES.CheckIntegrity).to(CheckIntegrity)
|
||||
container.bind<GetItem>(TYPES.GetItem).to(GetItem)
|
||||
|
||||
// Handlers
|
||||
container.bind<ItemsSyncedEventHandler>(TYPES.ItemsSyncedEventHandler).to(ItemsSyncedEventHandler)
|
||||
container
|
||||
.bind<EmailArchiveExtensionSyncedEventHandler>(TYPES.EmailArchiveExtensionSyncedEventHandler)
|
||||
.to(EmailArchiveExtensionSyncedEventHandler)
|
||||
container
|
||||
.bind<DuplicateItemSyncedEventHandler>(TYPES.DuplicateItemSyncedEventHandler)
|
||||
.to(DuplicateItemSyncedEventHandler)
|
||||
container
|
||||
.bind<AccountDeletionRequestedEventHandler>(TYPES.AccountDeletionRequestedEventHandler)
|
||||
.to(AccountDeletionRequestedEventHandler)
|
||||
container
|
||||
.bind<EmailBackupRequestedEventHandler>(TYPES.EmailBackupRequestedEventHandler)
|
||||
.to(EmailBackupRequestedEventHandler)
|
||||
container
|
||||
.bind<CloudBackupRequestedEventHandler>(TYPES.CloudBackupRequestedEventHandler)
|
||||
.to(CloudBackupRequestedEventHandler)
|
||||
|
||||
// Services
|
||||
container.bind<ContentDecoder>(TYPES.ContentDecoder).to(ContentDecoder)
|
||||
container.bind<DomainEventFactoryInterface>(TYPES.DomainEventFactory).to(DomainEventFactory)
|
||||
container.bind<AxiosInstance>(TYPES.HTTPClient).toConstantValue(axios.create())
|
||||
container.bind<ItemServiceInterface>(TYPES.ItemService).to(ItemService)
|
||||
container.bind<ItemTransferCalculatorInterface>(TYPES.ItemTransferCalculator).to(ItemTransferCalculator)
|
||||
container.bind<TimerInterface>(TYPES.Timer).toConstantValue(new Timer())
|
||||
container.bind<SyncResponseFactory20161215>(TYPES.SyncResponseFactory20161215).to(SyncResponseFactory20161215)
|
||||
container.bind<SyncResponseFactory20200115>(TYPES.SyncResponseFactory20200115).to(SyncResponseFactory20200115)
|
||||
container
|
||||
.bind<SyncResponseFactoryResolverInterface>(TYPES.SyncResponseFactoryResolver)
|
||||
.to(SyncResponseFactoryResolver)
|
||||
container.bind<AuthHttpServiceInterface>(TYPES.AuthHttpService).to(AuthHttpService)
|
||||
container.bind<ExtensionsHttpServiceInterface>(TYPES.ExtensionsHttpService).to(ExtensionsHttpService)
|
||||
container.bind<ItemBackupServiceInterface>(TYPES.ItemBackupService).to(S3ItemBackupService)
|
||||
container.bind<RevisionServiceInterface>(TYPES.RevisionService).to(RevisionService)
|
||||
const periodKeyGenerator = new PeriodKeyGenerator()
|
||||
container
|
||||
.bind<AnalyticsStoreInterface>(TYPES.AnalyticsStore)
|
||||
.toConstantValue(new RedisAnalyticsStore(periodKeyGenerator, container.get(TYPES.Redis)))
|
||||
container
|
||||
.bind<StatisticsStoreInterface>(TYPES.StatisticsStore)
|
||||
.toConstantValue(new RedisStatisticsStore(periodKeyGenerator, container.get(TYPES.Redis)))
|
||||
|
||||
if (env.get('SNS_TOPIC_ARN', true)) {
|
||||
container
|
||||
.bind<SNSDomainEventPublisher>(TYPES.DomainEventPublisher)
|
||||
.toConstantValue(new SNSDomainEventPublisher(container.get(TYPES.SNS), container.get(TYPES.SNS_TOPIC_ARN)))
|
||||
} else {
|
||||
container
|
||||
.bind<RedisDomainEventPublisher>(TYPES.DomainEventPublisher)
|
||||
.toConstantValue(
|
||||
new RedisDomainEventPublisher(container.get(TYPES.Redis), container.get(TYPES.REDIS_EVENTS_CHANNEL)),
|
||||
)
|
||||
}
|
||||
|
||||
const eventHandlers: Map<string, DomainEventHandlerInterface> = new Map([
|
||||
['DUPLICATE_ITEM_SYNCED', container.get(TYPES.DuplicateItemSyncedEventHandler)],
|
||||
['ITEMS_SYNCED', container.get(TYPES.ItemsSyncedEventHandler)],
|
||||
['EMAIL_ARCHIVE_EXTENSION_SYNCED', container.get(TYPES.EmailArchiveExtensionSyncedEventHandler)],
|
||||
['ACCOUNT_DELETION_REQUESTED', container.get(TYPES.AccountDeletionRequestedEventHandler)],
|
||||
['EMAIL_BACKUP_REQUESTED', container.get(TYPES.EmailBackupRequestedEventHandler)],
|
||||
['CLOUD_BACKUP_REQUESTED', container.get(TYPES.CloudBackupRequestedEventHandler)],
|
||||
])
|
||||
|
||||
if (env.get('SQS_QUEUE_URL', true)) {
|
||||
container
|
||||
.bind<DomainEventMessageHandlerInterface>(TYPES.DomainEventMessageHandler)
|
||||
.toConstantValue(
|
||||
env.get('NEW_RELIC_ENABLED', true) === 'true'
|
||||
? new SQSNewRelicEventMessageHandler(eventHandlers, container.get(TYPES.Logger))
|
||||
: new SQSEventMessageHandler(eventHandlers, container.get(TYPES.Logger)),
|
||||
)
|
||||
container
|
||||
.bind<DomainEventSubscriberFactoryInterface>(TYPES.DomainEventSubscriberFactory)
|
||||
.toConstantValue(
|
||||
new SQSDomainEventSubscriberFactory(
|
||||
container.get(TYPES.SQS),
|
||||
container.get(TYPES.SQS_QUEUE_URL),
|
||||
container.get(TYPES.DomainEventMessageHandler),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
container
|
||||
.bind<DomainEventMessageHandlerInterface>(TYPES.DomainEventMessageHandler)
|
||||
.toConstantValue(new RedisEventMessageHandler(eventHandlers, container.get(TYPES.Logger)))
|
||||
container
|
||||
.bind<DomainEventSubscriberFactoryInterface>(TYPES.DomainEventSubscriberFactory)
|
||||
.toConstantValue(
|
||||
new RedisDomainEventSubscriberFactory(
|
||||
container.get(TYPES.Redis),
|
||||
container.get(TYPES.DomainEventMessageHandler),
|
||||
container.get(TYPES.REDIS_EVENTS_CHANNEL),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
container.bind<ItemFactoryInterface>(TYPES.ItemFactory).to(ItemFactory)
|
||||
|
||||
container.bind<OwnershipFilter>(TYPES.OwnershipFilter).to(OwnershipFilter)
|
||||
container.bind<TimeDifferenceFilter>(TYPES.TimeDifferenceFilter).to(TimeDifferenceFilter)
|
||||
container.bind<UuidFilter>(TYPES.UuidFilter).to(UuidFilter)
|
||||
container.bind<ContentTypeFilter>(TYPES.ContentTypeFilter).to(ContentTypeFilter)
|
||||
container.bind<ContentFilter>(TYPES.ContentFilter).to(ContentFilter)
|
||||
|
||||
container
|
||||
.bind<ItemSaveValidatorInterface>(TYPES.ItemSaveValidator)
|
||||
.toConstantValue(
|
||||
new ItemSaveValidator([
|
||||
container.get(TYPES.OwnershipFilter),
|
||||
container.get(TYPES.TimeDifferenceFilter),
|
||||
container.get(TYPES.UuidFilter),
|
||||
container.get(TYPES.ContentTypeFilter),
|
||||
container.get(TYPES.ContentFilter),
|
||||
]),
|
||||
)
|
||||
|
||||
return container
|
||||
}
|
||||
}
|
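The container above is the composition root for the whole service. A minimal bootstrap sketch of how it might be consumed follows; the entry-point path, port, and InversifyExpressServer wiring are assumptions based on the inversify-express-utils controllers elsewhere in this commit, not code from the diff:

import 'reflect-metadata'
import { InversifyExpressServer } from 'inversify-express-utils'
import { ContainerConfigLoader } from './Bootstrap/Container' // hypothetical path

void (async () => {
  const container = await new ContainerConfigLoader().load()
  // InversifyExpressServer discovers the @controller classes registered in the container
  const server = new InversifyExpressServer(container)
  server.build().listen(3000)
})()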
42
packages/syncing-server/src/Bootstrap/DataSource.ts
Normal file
@ -0,0 +1,42 @@
import { DataSource, LoggerOptions } from 'typeorm'
import { Item } from '../Domain/Item/Item'
import { Revision } from '../Domain/Revision/Revision'
import { Env } from './Env'

const env: Env = new Env()
env.load()

const maxQueryExecutionTime = env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
  ? +env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
  : 45_000

export const AppDataSource = new DataSource({
  type: 'mysql',
  supportBigNumbers: true,
  bigNumberStrings: false,
  maxQueryExecutionTime,
  replication: {
    master: {
      host: env.get('DB_HOST'),
      port: parseInt(env.get('DB_PORT')),
      username: env.get('DB_USERNAME'),
      password: env.get('DB_PASSWORD'),
      database: env.get('DB_DATABASE'),
    },
    slaves: [
      {
        host: env.get('DB_REPLICA_HOST'),
        port: parseInt(env.get('DB_PORT')),
        username: env.get('DB_USERNAME'),
        password: env.get('DB_PASSWORD'),
        database: env.get('DB_DATABASE'),
      },
    ],
    removeNodeErrorCount: 10,
    restoreNodeTimeout: 5,
  },
  entities: [Item, Revision],
  migrations: [env.get('DB_MIGRATIONS_PATH', true) ?? 'dist/migrations/*.js'],
  migrationsRun: true,
  logging: <LoggerOptions>env.get('DB_DEBUG_LEVEL'),
})
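A minimal usage sketch with illustrative values, not from this diff. DataSource.ts loads Env itself, so the DB_* variables would live in .env; with no real replica, DB_REPLICA_HOST can simply point at the master:

// .env (illustrative values):
//   DB_HOST=127.0.0.1
//   DB_REPLICA_HOST=127.0.0.1
//   DB_PORT=3306
//   DB_USERNAME=syncing_server
//   DB_PASSWORD=secret
//   DB_DATABASE=syncing_server
//   DB_DEBUG_LEVEL=all
import { AppDataSource } from './DataSource'

void AppDataSource.initialize().then(() => {
  // migrationsRun: true means pending migrations under DB_MIGRATIONS_PATH run here,
  // before the pool serves queries; reads then go to slaves, writes to master
  console.log('data source initialized')
})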
24
packages/syncing-server/src/Bootstrap/Env.ts
Normal file
@ -0,0 +1,24 @@
import { config, DotenvParseOutput } from 'dotenv'
import { injectable } from 'inversify'

@injectable()
export class Env {
  private env?: DotenvParseOutput

  public load(): void {
    const output = config()
    this.env = <DotenvParseOutput>output.parsed
  }

  public get(key: string, optional = false): string {
    if (!this.env) {
      this.load()
    }

    if (!process.env[key] && !optional) {
      throw new Error(`Environment variable ${key} not set`)
    }

    return <string>process.env[key]
  }
}
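A short sketch of the contract above. Note the cast on the return: for optional keys the method can return undefined at runtime despite its string return type, which is why callers in this commit guard optional lookups with truthiness checks:

import { Env } from './Env'

const env = new Env()
env.load()

const redisUrl = env.get('REDIS_URL') // throws if REDIS_URL is unset
const topicArn = env.get('SNS_TOPIC_ARN', true) // optional: may be undefined at runtime
if (topicArn) {
  // only wire SNS publishing when the variable is actually present
}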
78
packages/syncing-server/src/Bootstrap/Types.ts
Normal file
@ -0,0 +1,78 @@
const TYPES = {
  DBConnection: Symbol.for('DBConnection'),
  Logger: Symbol.for('Logger'),
  Redis: Symbol.for('Redis'),
  SNS: Symbol.for('SNS'),
  SQS: Symbol.for('SQS'),
  S3: Symbol.for('S3'),
  // Repositories
  RevisionRepository: Symbol.for('RevisionRepository'),
  ItemRepository: Symbol.for('ItemRepository'),
  // ORM
  ORMRevisionRepository: Symbol.for('ORMRevisionRepository'),
  ORMItemRepository: Symbol.for('ORMItemRepository'),
  // Middleware
  AuthMiddleware: Symbol.for('AuthMiddleware'),
  // Projectors
  RevisionProjector: Symbol.for('RevisionProjector'),
  ItemProjector: Symbol.for('ItemProjector'),
  SavedItemProjector: Symbol.for('SavedItemProjector'),
  ItemConflictProjector: Symbol.for('ItemConflictProjector'),
  // env vars
  REDIS_URL: Symbol.for('REDIS_URL'),
  SNS_TOPIC_ARN: Symbol.for('SNS_TOPIC_ARN'),
  SNS_AWS_REGION: Symbol.for('SNS_AWS_REGION'),
  SQS_QUEUE_URL: Symbol.for('SQS_QUEUE_URL'),
  SQS_AWS_REGION: Symbol.for('SQS_AWS_REGION'),
  REDIS_EVENTS_CHANNEL: Symbol.for('REDIS_EVENTS_CHANNEL'),
  AUTH_JWT_SECRET: Symbol.for('AUTH_JWT_SECRET'),
  INTERNAL_DNS_REROUTE_ENABLED: Symbol.for('INTERNAL_DNS_REROUTE_ENABLED'),
  EXTENSIONS_SERVER_URL: Symbol.for('EXTENSIONS_SERVER_URL'),
  AUTH_SERVER_URL: Symbol.for('AUTH_SERVER_URL'),
  S3_AWS_REGION: Symbol.for('S3_AWS_REGION'),
  S3_BACKUP_BUCKET_NAME: Symbol.for('S3_BACKUP_BUCKET_NAME'),
  EMAIL_ATTACHMENT_MAX_BYTE_SIZE: Symbol.for('EMAIL_ATTACHMENT_MAX_BYTE_SIZE'),
  REVISIONS_FREQUENCY: Symbol.for('REVISIONS_FREQUENCY'),
  NEW_RELIC_ENABLED: Symbol.for('NEW_RELIC_ENABLED'),
  VERSION: Symbol.for('VERSION'),
  CONTENT_SIZE_TRANSFER_LIMIT: Symbol.for('CONTENT_SIZE_TRANSFER_LIMIT'),
  // use cases
  SyncItems: Symbol.for('SyncItems'),
  CheckIntegrity: Symbol.for('CheckIntegrity'),
  GetItem: Symbol.for('GetItem'),
  // Handlers
  AccountDeletionRequestedEventHandler: Symbol.for('AccountDeletionRequestedEventHandler'),
  DuplicateItemSyncedEventHandler: Symbol.for('DuplicateItemSyncedEventHandler'),
  ItemsSyncedEventHandler: Symbol.for('ItemsSyncedEventHandler'),
  EmailArchiveExtensionSyncedEventHandler: Symbol.for('EmailArchiveExtensionSyncedEventHandler'),
  EmailBackupRequestedEventHandler: Symbol.for('EmailBackupRequestedEventHandler'),
  CloudBackupRequestedEventHandler: Symbol.for('CloudBackupRequestedEventHandler'),
  // Services
  ContentDecoder: Symbol.for('ContentDecoder'),
  DomainEventPublisher: Symbol.for('DomainEventPublisher'),
  DomainEventSubscriberFactory: Symbol.for('DomainEventSubscriberFactory'),
  DomainEventFactory: Symbol.for('DomainEventFactory'),
  DomainEventMessageHandler: Symbol.for('DomainEventMessageHandler'),
  HTTPClient: Symbol.for('HTTPClient'),
  ItemService: Symbol.for('ItemService'),
  Timer: Symbol.for('Timer'),
  SyncResponseFactory20161215: Symbol.for('SyncResponseFactory20161215'),
  SyncResponseFactory20200115: Symbol.for('SyncResponseFactory20200115'),
  SyncResponseFactoryResolver: Symbol.for('SyncResponseFactoryResolver'),
  AuthHttpService: Symbol.for('AuthHttpService'),
  ExtensionsHttpService: Symbol.for('ExtensionsHttpService'),
  ItemBackupService: Symbol.for('ItemBackupService'),
  RevisionService: Symbol.for('RevisionService'),
  ItemSaveValidator: Symbol.for('ItemSaveValidator'),
  OwnershipFilter: Symbol.for('OwnershipFilter'),
  TimeDifferenceFilter: Symbol.for('TimeDifferenceFilter'),
  UuidFilter: Symbol.for('UuidFilter'),
  ContentTypeFilter: Symbol.for('ContentTypeFilter'),
  ContentFilter: Symbol.for('ContentFilter'),
  ItemFactory: Symbol.for('ItemFactory'),
  AnalyticsStore: Symbol.for('AnalyticsStore'),
  StatisticsStore: Symbol.for('StatisticsStore'),
  ItemTransferCalculator: Symbol.for('ItemTransferCalculator'),
}

export default TYPES
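These tokens rely on Symbol.for, which reads from the process-wide symbol registry: every Symbol.for('Logger') call returns the same symbol, so separately loaded modules agree on the injection token, whereas a plain Symbol() would mint a fresh, unequal value each time. A two-line illustration (not from the diff):

console.log(Symbol.for('Logger') === Symbol.for('Logger')) // true: global registry
console.log(Symbol('Logger') === Symbol('Logger')) // false: each call is unique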
137
packages/syncing-server/src/Controller/AuthMiddleware.spec.ts
Normal file
@ -0,0 +1,137 @@
import 'reflect-metadata'

import * as winston from 'winston'

import { AuthMiddleware } from './AuthMiddleware'
import { NextFunction, Request, Response } from 'express'
import { sign } from 'jsonwebtoken'
import { RoleName } from '@standardnotes/common'

describe('AuthMiddleware', () => {
  let logger: winston.Logger
  const jwtSecret = 'auth_jwt_secret'
  let request: Request
  let response: Response
  let next: NextFunction

  const createMiddleware = () => new AuthMiddleware(jwtSecret, logger)

  beforeEach(() => {
    logger = {} as jest.Mocked<winston.Logger>
    logger.info = jest.fn()
    logger.debug = jest.fn()
    logger.warn = jest.fn()
    logger.error = jest.fn()

    request = {
      headers: {},
    } as jest.Mocked<Request>
    request.header = jest.fn()
    response = {
      locals: {},
    } as jest.Mocked<Response>
    response.status = jest.fn().mockReturnThis()
    response.send = jest.fn()
    next = jest.fn()
  })

  it('should authorize user from an auth JWT token if present', async () => {
    const authToken = sign(
      {
        user: { uuid: '123' },
        session: { uuid: '234' },
        roles: [
          {
            uuid: '1-2-3',
            name: RoleName.CoreUser,
          },
          {
            uuid: '2-3-4',
            name: RoleName.ProUser,
          },
        ],
        analyticsId: 123,
        permissions: [],
      },
      jwtSecret,
      { algorithm: 'HS256' },
    )

    request.header = jest.fn().mockReturnValue(authToken)

    await createMiddleware().handler(request, response, next)

    expect(response.locals.user).toEqual({ uuid: '123' })
    expect(response.locals.roleNames).toEqual(['CORE_USER', 'PRO_USER'])
    expect(response.locals.session).toEqual({ uuid: '234' })
    expect(response.locals.readOnlyAccess).toBeFalsy()
    expect(response.locals.analyticsId).toEqual(123)

    expect(next).toHaveBeenCalled()
  })

  it('should authorize user from an auth JWT token if present with read only access', async () => {
    const authToken = sign(
      {
        user: { uuid: '123' },
        session: {
          uuid: '234',
          readonly_access: true,
        },
        roles: [
          {
            uuid: '1-2-3',
            name: RoleName.CoreUser,
          },
          {
            uuid: '2-3-4',
            name: RoleName.ProUser,
          },
        ],
        analyticsId: 123,
        permissions: [],
      },
      jwtSecret,
      { algorithm: 'HS256' },
    )

    request.header = jest.fn().mockReturnValue(authToken)

    await createMiddleware().handler(request, response, next)

    expect(response.locals.user).toEqual({ uuid: '123' })
    expect(response.locals.roleNames).toEqual(['CORE_USER', 'PRO_USER'])
    expect(response.locals.session).toEqual({ uuid: '234', readonly_access: true })
    expect(response.locals.readOnlyAccess).toBeTruthy()
    expect(response.locals.analyticsId).toEqual(123)

    expect(next).toHaveBeenCalled()
  })

  it('should not authorize user from an auth JWT token if it is invalid', async () => {
    const authToken = sign(
      {
        user: { uuid: '123' },
        session: { uuid: '234' },
        roles: [],
        permissions: [],
      },
      jwtSecret,
      { algorithm: 'HS256', notBefore: '2 days' },
    )

    request.header = jest.fn().mockReturnValue(authToken)

    await createMiddleware().handler(request, response, next)

    expect(response.status).toHaveBeenCalledWith(401)
    expect(next).not.toHaveBeenCalled()
  })

  it('should not authorize if authorization header is missing', async () => {
    await createMiddleware().handler(request, response, next)

    expect(response.status).toHaveBeenCalledWith(401)
    expect(next).not.toHaveBeenCalled()
  })
})
50
packages/syncing-server/src/Controller/AuthMiddleware.ts
Normal file
@ -0,0 +1,50 @@
import { NextFunction, Request, Response } from 'express'
import { inject, injectable } from 'inversify'
import { BaseMiddleware } from 'inversify-express-utils'
import { verify } from 'jsonwebtoken'
import { CrossServiceTokenData } from '@standardnotes/auth'
import * as winston from 'winston'
import TYPES from '../Bootstrap/Types'

@injectable()
export class AuthMiddleware extends BaseMiddleware {
  constructor(
    @inject(TYPES.AUTH_JWT_SECRET) private authJWTSecret: string,
    @inject(TYPES.Logger) private logger: winston.Logger,
  ) {
    super()
  }

  async handler(request: Request, response: Response, next: NextFunction): Promise<void> {
    try {
      if (!request.header('X-Auth-Token')) {
        return this.sendInvalidAuthResponse(response)
      }

      const authToken = <string>request.header('X-Auth-Token')

      const decodedToken = <CrossServiceTokenData>verify(authToken, this.authJWTSecret, { algorithms: ['HS256'] })

      response.locals.user = decodedToken.user
      response.locals.roleNames = decodedToken.roles.map((role) => role.name)
      response.locals.session = decodedToken.session
      response.locals.readOnlyAccess = decodedToken.session?.readonly_access ?? false
      response.locals.analyticsId = decodedToken.analyticsId

      return next()
    } catch (error) {
      this.logger.error(`Could not verify JWT Auth Token ${(error as Error).message}`)

      return this.sendInvalidAuthResponse(response)
    }
  }

  private sendInvalidAuthResponse(response: Response) {
    response.status(401).send({
      error: {
        tag: 'invalid-auth',
        message: 'Invalid login credentials.',
      },
    })
  }
}
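A hypothetical smoke-test sketch of the middleware's contract, mirroring what the spec above exercises. In deployment the X-Auth-Token is a cross-service token minted by the upstream auth service, not by end clients; the URL, port, and secret here are illustrative:

import axios from 'axios'
import { sign } from 'jsonwebtoken'

const token = sign(
  { user: { uuid: '1-2-3' }, roles: [], session: { uuid: '2-3-4' } },
  'auth_jwt_secret', // must match the AUTH_JWT_SECRET the service was started with
  { algorithm: 'HS256' },
)

void axios.post(
  'http://localhost:3000/items/sync',
  { api: '20200115', items: [] },
  { headers: { 'X-Auth-Token': token } },
)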
@ -0,0 +1,9 @@
import { controller, httpGet } from 'inversify-express-utils'

@controller('/healthcheck')
export class HealthCheckController {
  @httpGet('/')
  public async get(): Promise<string> {
    return 'OK'
  }
}
240
packages/syncing-server/src/Controller/ItemsController.spec.ts
Normal file
@ -0,0 +1,240 @@
import 'reflect-metadata'

import * as express from 'express'
import { ContentType } from '@standardnotes/common'

import { ItemsController } from './ItemsController'
import { results } from 'inversify-express-utils'
import { SyncItems } from '../Domain/UseCase/SyncItems'
import { ApiVersion } from '../Domain/Api/ApiVersion'
import { SyncResponseFactoryResolverInterface } from '../Domain/Item/SyncResponse/SyncResponseFactoryResolverInterface'
import { SyncResponseFactoryInterface } from '../Domain/Item/SyncResponse/SyncResponseFactoryInterface'
import { SyncResponse20200115 } from '../Domain/Item/SyncResponse/SyncResponse20200115'
import { CheckIntegrity } from '../Domain/UseCase/CheckIntegrity/CheckIntegrity'
import { GetItem } from '../Domain/UseCase/GetItem/GetItem'
import { Item } from '../Domain/Item/Item'
import { ProjectorInterface } from '../Projection/ProjectorInterface'
import { ItemProjection } from '../Projection/ItemProjection'

describe('ItemsController', () => {
  let syncItems: SyncItems
  let checkIntegrity: CheckIntegrity
  let getItem: GetItem
  let itemProjector: ProjectorInterface<Item, ItemProjection>
  let request: express.Request
  let response: express.Response
  let syncResponseFactoryResolver: SyncResponseFactoryResolverInterface
  let syncResponseFactory: SyncResponseFactoryInterface
  let syncResponse: SyncResponse20200115

  const createController = () =>
    new ItemsController(syncItems, checkIntegrity, getItem, itemProjector, syncResponseFactoryResolver)

  beforeEach(() => {
    itemProjector = {} as jest.Mocked<ProjectorInterface<Item, ItemProjection>>
    itemProjector.projectFull = jest.fn().mockReturnValue({ foo: 'bar' })

    syncItems = {} as jest.Mocked<SyncItems>
    syncItems.execute = jest.fn().mockReturnValue({ foo: 'bar' })

    checkIntegrity = {} as jest.Mocked<CheckIntegrity>
    checkIntegrity.execute = jest.fn().mockReturnValue({ mismatches: [{ uuid: '1-2-3', updated_at_timestamp: 2 }] })

    getItem = {} as jest.Mocked<GetItem>
    getItem.execute = jest.fn().mockReturnValue({ success: true, item: {} as jest.Mocked<Item> })

    request = {
      headers: {},
      body: {},
      params: {},
    } as jest.Mocked<express.Request>

    request.body.api = ApiVersion.v20200115
    request.body.sync_token = 'MjoxNjE3MTk1MzQyLjc1ODEyMTc='
    request.body.limit = 150
    request.body.compute_integrity = false
    request.headers['user-agent'] = 'Google Chrome'
    request.body.items = [
      {
        content: 'test',
        content_type: ContentType.Note,
        created_at: '2021-02-19T11:35:45.655Z',
        deleted: false,
        duplicate_of: null,
        enc_item_key: 'test',
        items_key_id: 'test',
        updated_at: '2021-02-19T11:35:45.655Z',
        uuid: '1-2-3',
      },
    ]

    response = {
      locals: {},
    } as jest.Mocked<express.Response>
    response.locals.user = {
      uuid: '123',
    }
    response.locals.analyticsId = 123

    syncResponse = {} as jest.Mocked<SyncResponse20200115>

    syncResponseFactory = {} as jest.Mocked<SyncResponseFactoryInterface>
    syncResponseFactory.createResponse = jest.fn().mockReturnValue(syncResponse)

    syncResponseFactoryResolver = {} as jest.Mocked<SyncResponseFactoryResolverInterface>
    syncResponseFactoryResolver.resolveSyncResponseFactoryVersion = jest.fn().mockReturnValue(syncResponseFactory)
  })

  it('should get a single item', async () => {
    request.params.uuid = '1-2-3'
    const httpResponse = <results.JsonResult>await createController().getSingleItem(request, response)
    const result = await httpResponse.executeAsync()

    expect(getItem.execute).toHaveBeenCalledWith({
      itemUuid: '1-2-3',
      userUuid: '123',
    })

    expect(result.statusCode).toEqual(200)
  })

  it('should return 404 on a missing single item', async () => {
    request.params.uuid = '1-2-3'
    getItem.execute = jest.fn().mockReturnValue({ success: false })

    const httpResponse = <results.NotFoundResult>await createController().getSingleItem(request, response)
    const result = await httpResponse.executeAsync()

    expect(getItem.execute).toHaveBeenCalledWith({
      itemUuid: '1-2-3',
      userUuid: '123',
    })

    expect(result.statusCode).toEqual(404)
  })

  it('should check items integrity', async () => {
    request.body.integrityPayloads = [
      {
        uuid: '1-2-3',
        updated_at_timestamp: 1,
      },
    ]

    const httpResponse = <results.JsonResult>await createController().checkItemsIntegrity(request, response)
    const result = await httpResponse.executeAsync()

    expect(checkIntegrity.execute).toHaveBeenCalledWith({
      integrityPayloads: [
        {
          updated_at_timestamp: 1,
          uuid: '1-2-3',
        },
      ],
      userUuid: '123',
    })

    expect(result.statusCode).toEqual(200)
    expect(await result.content.readAsStringAsync()).toEqual(
      '{"mismatches":[{"uuid":"1-2-3","updated_at_timestamp":2}]}',
    )
  })

  it('should check items integrity with missing request parameter', async () => {
    const httpResponse = <results.JsonResult>await createController().checkItemsIntegrity(request, response)
    const result = await httpResponse.executeAsync()

    expect(checkIntegrity.execute).toHaveBeenCalledWith({
      integrityPayloads: [],
      userUuid: '123',
    })

    expect(result.statusCode).toEqual(200)
    expect(await result.content.readAsStringAsync()).toEqual(
      '{"mismatches":[{"uuid":"1-2-3","updated_at_timestamp":2}]}',
    )
  })

  it('should sync items', async () => {
    const httpResponse = <results.JsonResult>await createController().sync(request, response)
    const result = await httpResponse.executeAsync()

    expect(syncItems.execute).toHaveBeenCalledWith({
      apiVersion: '20200115',
      computeIntegrityHash: false,
      itemHashes: [
        {
          content: 'test',
          content_type: 'Note',
          created_at: '2021-02-19T11:35:45.655Z',
          deleted: false,
          duplicate_of: null,
          enc_item_key: 'test',
          items_key_id: 'test',
          updated_at: '2021-02-19T11:35:45.655Z',
          uuid: '1-2-3',
        },
      ],
      limit: 150,
      syncToken: 'MjoxNjE3MTk1MzQyLjc1ODEyMTc=',
      userUuid: '123',
      analyticsId: 123,
      sessionUuid: null,
    })

    expect(result.statusCode).toEqual(200)
  })

  it('should sync items with defaulting API version if none specified', async () => {
    delete request.body.api

    const httpResponse = <results.JsonResult>await createController().sync(request, response)
    const result = await httpResponse.executeAsync()

    expect(syncItems.execute).toHaveBeenCalledWith({
      apiVersion: '20161215',
      computeIntegrityHash: false,
      itemHashes: [
        {
          content: 'test',
          content_type: 'Note',
          created_at: '2021-02-19T11:35:45.655Z',
          deleted: false,
          duplicate_of: null,
          enc_item_key: 'test',
          items_key_id: 'test',
          updated_at: '2021-02-19T11:35:45.655Z',
          uuid: '1-2-3',
        },
      ],
      limit: 150,
      syncToken: 'MjoxNjE3MTk1MzQyLjc1ODEyMTc=',
      userUuid: '123',
      analyticsId: 123,
      sessionUuid: null,
    })

    expect(result.statusCode).toEqual(200)
  })

  it('should sync items with no incoming items in request', async () => {
    response.locals.session = { uuid: '2-3-4' }
    delete request.body.items

    const httpResponse = <results.JsonResult>await createController().sync(request, response)
    const result = await httpResponse.executeAsync()

    expect(syncItems.execute).toHaveBeenCalledWith({
      apiVersion: '20200115',
      computeIntegrityHash: false,
      itemHashes: [],
      limit: 150,
      syncToken: 'MjoxNjE3MTk1MzQyLjc1ODEyMTc=',
      userUuid: '123',
      analyticsId: 123,
      sessionUuid: '2-3-4',
    })

    expect(result.statusCode).toEqual(200)
  })
})
86
packages/syncing-server/src/Controller/ItemsController.ts
Normal file
@ -0,0 +1,86 @@
import { Request, Response } from 'express'
import { inject } from 'inversify'
import { BaseHttpController, controller, httpGet, httpPost, results } from 'inversify-express-utils'
import TYPES from '../Bootstrap/Types'
import { ApiVersion } from '../Domain/Api/ApiVersion'
import { Item } from '../Domain/Item/Item'
import { SyncResponseFactoryResolverInterface } from '../Domain/Item/SyncResponse/SyncResponseFactoryResolverInterface'
import { CheckIntegrity } from '../Domain/UseCase/CheckIntegrity/CheckIntegrity'
import { GetItem } from '../Domain/UseCase/GetItem/GetItem'
import { SyncItems } from '../Domain/UseCase/SyncItems'
import { ItemProjection } from '../Projection/ItemProjection'
import { ProjectorInterface } from '../Projection/ProjectorInterface'

@controller('/items', TYPES.AuthMiddleware)
export class ItemsController extends BaseHttpController {
  constructor(
    @inject(TYPES.SyncItems) private syncItems: SyncItems,
    @inject(TYPES.CheckIntegrity) private checkIntegrity: CheckIntegrity,
    @inject(TYPES.GetItem) private getItem: GetItem,
    @inject(TYPES.ItemProjector) private itemProjector: ProjectorInterface<Item, ItemProjection>,
    @inject(TYPES.SyncResponseFactoryResolver)
    private syncResponseFactoryResolver: SyncResponseFactoryResolverInterface,
  ) {
    super()
  }

  @httpPost('/sync')
  public async sync(request: Request, response: Response): Promise<results.JsonResult> {
    let itemHashes = []
    if ('items' in request.body) {
      itemHashes = request.body.items
    }

    const syncResult = await this.syncItems.execute({
      userUuid: response.locals.user.uuid,
      itemHashes,
      computeIntegrityHash: request.body.compute_integrity === true,
      syncToken: request.body.sync_token,
      cursorToken: request.body.cursor_token,
      limit: request.body.limit,
      contentType: request.body.content_type,
      apiVersion: request.body.api ?? ApiVersion.v20161215,
      readOnlyAccess: response.locals.readOnlyAccess,
      analyticsId: response.locals.analyticsId,
      sessionUuid: response.locals.session ? response.locals.session.uuid : null,
    })

    const syncResponse = await this.syncResponseFactoryResolver
      .resolveSyncResponseFactoryVersion(request.body.api)
      .createResponse(syncResult)

    return this.json(syncResponse)
  }

  @httpPost('/check-integrity')
  public async checkItemsIntegrity(request: Request, response: Response): Promise<results.JsonResult> {
    let integrityPayloads = []
    if ('integrityPayloads' in request.body) {
      integrityPayloads = request.body.integrityPayloads
    }

    const result = await this.checkIntegrity.execute({
      userUuid: response.locals.user.uuid,
      integrityPayloads,
    })

    return this.json(result)
  }

  @httpGet('/:uuid')
  public async getSingleItem(
    request: Request,
    response: Response,
  ): Promise<results.NotFoundResult | results.JsonResult> {
    const result = await this.getItem.execute({
      userUuid: response.locals.user.uuid,
      itemUuid: request.params.uuid,
    })

    if (!result.success) {
      return this.notFound()
    }

    return this.json({ item: await this.itemProjector.projectFull(result.item) })
  }
}
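For reference, the shape of the request body that /items/sync consumes, reconstructed from the controller above; every value is illustrative, not from the diff:

const syncRequestBody = {
  api: '20200115', // optional; falls back to ApiVersion.v20161215 when omitted
  sync_token: 'MjoxNjE3MTk1MzQyLjc1ODEyMTc=',
  cursor_token: undefined, // present only when paginating a large delta
  limit: 150,
  compute_integrity: false, // must be exactly true to trigger the integrity hash
  content_type: 'Note', // optional server-side filter
  items: [], // may be omitted entirely on pull-only syncs
}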
@ -0,0 +1,90 @@
import 'reflect-metadata'

import { Revision } from '../Domain/Revision/Revision'
import * as express from 'express'

import { RevisionsController } from './RevisionsController'
import { results } from 'inversify-express-utils'
import { ProjectorInterface } from '../Projection/ProjectorInterface'
import { RevisionServiceInterface } from '../Domain/Revision/RevisionServiceInterface'
import { RevisionProjection } from '../Projection/RevisionProjection'

describe('RevisionsController', () => {
  let revisionProjector: ProjectorInterface<Revision, RevisionProjection>
  let revisionService: RevisionServiceInterface
  let revision: Revision
  let request: express.Request
  let response: express.Response

  const createController = () => new RevisionsController(revisionService, revisionProjector)

  beforeEach(() => {
    revision = {} as jest.Mocked<Revision>

    revisionProjector = {} as jest.Mocked<ProjectorInterface<Revision, RevisionProjection>>

    revisionService = {} as jest.Mocked<RevisionServiceInterface>
    revisionService.getRevisions = jest.fn().mockReturnValue([revision])
    revisionService.getRevision = jest.fn().mockReturnValue(revision)
    revisionService.removeRevision = jest.fn().mockReturnValue(true)

    request = {
      params: {},
    } as jest.Mocked<express.Request>

    response = {
      locals: {},
    } as jest.Mocked<express.Response>
    response.locals.user = {
      uuid: '123',
    }
    response.locals.roleNames = ['BASIC_USER']
  })

  it('should return revisions for an item', async () => {
    revisionProjector.projectSimple = jest.fn().mockReturnValue({ foo: 'bar' })

    const revisionResponse = await createController().getRevisions(request, response)

    expect(revisionResponse.json).toEqual([{ foo: 'bar' }])
  })

  it('should return a specific revision for an item', async () => {
    revisionProjector.projectFull = jest.fn().mockReturnValue({ foo: 'bar' })

    const httpResponse = <results.JsonResult>await createController().getRevision(request, response)

    expect(httpResponse.json).toEqual({ foo: 'bar' })
  })

  it('should remove a specific revision for an item', async () => {
    const httpResponse = await createController().deleteRevision(request, response)

    expect(httpResponse).toBeInstanceOf(results.OkResult)
  })

  it('should not remove a specific revision for an item if it fails', async () => {
    revisionService.removeRevision = jest.fn().mockReturnValue(false)

    const httpResponse = await createController().deleteRevision(request, response)

    expect(httpResponse).toBeInstanceOf(results.BadRequestResult)
  })

  it('should not remove a specific revision for an item if the session is read only', async () => {
    response.locals.readOnlyAccess = true

    const httpResponse = await createController().deleteRevision(request, response)
    const result = await httpResponse.executeAsync()

    expect(result.statusCode).toEqual(401)
  })

  it('should return a 404 for a not found specific revision in an item', async () => {
    revisionService.getRevision = jest.fn().mockReturnValue(null)

    const httpResponse = await createController().getRevision(request, response)

    expect(httpResponse).toBeInstanceOf(results.NotFoundResult)
  })
})
@ -0,0 +1,80 @@
import { Request, Response } from 'express'
import { BaseHttpController, controller, httpDelete, httpGet, results } from 'inversify-express-utils'
import { inject } from 'inversify'

import TYPES from '../Bootstrap/Types'
import { ProjectorInterface } from '../Projection/ProjectorInterface'
import { Revision } from '../Domain/Revision/Revision'
import { RevisionServiceInterface } from '../Domain/Revision/RevisionServiceInterface'
import { ErrorTag } from '@standardnotes/common'
import { RevisionProjection } from '../Projection/RevisionProjection'

@controller('/items/:itemUuid/revisions', TYPES.AuthMiddleware)
export class RevisionsController extends BaseHttpController {
  constructor(
    @inject(TYPES.RevisionService) private revisionService: RevisionServiceInterface,
    @inject(TYPES.RevisionProjector) private revisionProjector: ProjectorInterface<Revision, RevisionProjection>,
  ) {
    super()
  }

  @httpGet('/')
  public async getRevisions(req: Request, response: Response): Promise<results.JsonResult> {
    const revisions = await this.revisionService.getRevisions(response.locals.user.uuid, req.params.itemUuid)

    const revisionProjections = []
    for (const revision of revisions) {
      revisionProjections.push(await this.revisionProjector.projectSimple(revision))
    }

    return this.json(revisionProjections)
  }

  @httpGet('/:uuid')
  public async getRevision(request: Request, response: Response): Promise<results.JsonResult | results.NotFoundResult> {
    const revision = await this.revisionService.getRevision({
      userRoles: response.locals.roleNames,
      userUuid: response.locals.user.uuid,
      itemUuid: request.params.itemUuid,
      revisionUuid: request.params.uuid,
    })

    if (!revision) {
      return this.notFound()
    }

    const revisionProjection = await this.revisionProjector.projectFull(revision)

    return this.json(revisionProjection)
  }

  @httpDelete('/:uuid')
  public async deleteRevision(
    request: Request,
    response: Response,
  ): Promise<results.BadRequestResult | results.OkResult | results.JsonResult> {
    if (response.locals.readOnlyAccess) {
      return this.json(
        {
          error: {
            tag: ErrorTag.ReadOnlyAccess,
            message: 'Session has read-only access.',
          },
        },
        401,
      )
    }

    const success = await this.revisionService.removeRevision({
      userUuid: response.locals.user.uuid,
      itemUuid: request.params.itemUuid,
      revisionUuid: request.params.uuid,
    })

    if (!success) {
      return this.badRequest()
    }

    return this.ok()
  }
}
5
packages/syncing-server/src/Domain/Api/ApiVersion.ts
Normal file
@ -0,0 +1,5 @@
export enum ApiVersion {
  v20161215 = '20161215',
  v20190520 = '20190520',
  v20200115 = '20200115',
}
@ -0,0 +1,7 @@
// Note: SettingName imported from the aws-sdk ECS client types appears to be
// accidental coupling; it resolves to what is effectively a string alias.
import { SettingName } from 'aws-sdk/clients/ecs'
import { KeyParamsData } from '@standardnotes/responses'

export interface AuthHttpServiceInterface {
  getUserKeyParams(dto: { email?: string; uuid?: string; authenticated: boolean }): Promise<KeyParamsData>
  getUserSetting(userUuid: string, settingName: SettingName): Promise<{ uuid: string; value: string | null }>
}
@ -0,0 +1,210 @@
import { TimerInterface } from '@standardnotes/time'
import 'reflect-metadata'

import { DomainEventFactory } from './DomainEventFactory'

describe('DomainEventFactory', () => {
  let timer: TimerInterface

  const createFactory = () => new DomainEventFactory(timer)

  beforeEach(() => {
    timer = {} as jest.Mocked<TimerInterface>
    timer.getUTCDate = jest.fn().mockReturnValue(new Date(1))
  })

  it('should create a USER_REGISTERED event', () => {
    expect(createFactory().createUserRegisteredEvent('1-2-3', 'test@test.te')).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: '1-2-3',
          userIdentifierType: 'uuid',
        },
        origin: 'syncing-server',
      },
      payload: {
        userUuid: '1-2-3',
        email: 'test@test.te',
      },
      type: 'USER_REGISTERED',
    })
  })

  it('should create an ITEMS_SYNCED event', () => {
    expect(
      createFactory().createItemsSyncedEvent({
        userUuid: '1-2-3',
        extensionUrl: 'https://test.com',
        extensionId: '2-3-4',
        itemUuids: ['3-4-5'],
        forceMute: false,
        skipFileBackup: false,
        source: 'realtime-extensions-sync',
      }),
    ).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: '1-2-3',
          userIdentifierType: 'uuid',
        },
        origin: 'syncing-server',
      },
      payload: {
        userUuid: '1-2-3',
        extensionUrl: 'https://test.com',
        extensionId: '2-3-4',
        itemUuids: ['3-4-5'],
        forceMute: false,
        skipFileBackup: false,
        source: 'realtime-extensions-sync',
      },
      type: 'ITEMS_SYNCED',
    })
  })

  it('should create a DROPBOX_BACKUP_FAILED event', () => {
    expect(createFactory().createDropboxBackupFailedEvent('1-2-3', 'test@test.com')).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: 'test@test.com',
          userIdentifierType: 'email',
        },
        origin: 'syncing-server',
      },
      payload: {
        email: 'test@test.com',
        muteCloudEmailsSettingUuid: '1-2-3',
      },
      type: 'DROPBOX_BACKUP_FAILED',
    })
  })

  it('should create a GOOGLE_DRIVE_BACKUP_FAILED event', () => {
    expect(createFactory().createGoogleDriveBackupFailedEvent('1-2-3', 'test@test.com')).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: 'test@test.com',
          userIdentifierType: 'email',
        },
        origin: 'syncing-server',
      },
      payload: {
        email: 'test@test.com',
        muteCloudEmailsSettingUuid: '1-2-3',
      },
      type: 'GOOGLE_DRIVE_BACKUP_FAILED',
    })
  })

  it('should create a ONE_DRIVE_BACKUP_FAILED event', () => {
    expect(createFactory().createOneDriveBackupFailedEvent('1-2-3', 'test@test.com')).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: 'test@test.com',
          userIdentifierType: 'email',
        },
        origin: 'syncing-server',
      },
      payload: {
        email: 'test@test.com',
        muteCloudEmailsSettingUuid: '1-2-3',
      },
      type: 'ONE_DRIVE_BACKUP_FAILED',
    })
  })

  it('should create a MAIL_BACKUP_ATTACHMENT_TOO_BIG event', () => {
    expect(
      createFactory().createMailBackupAttachmentTooBigEvent({
        allowedSize: '1000',
        attachmentSize: '1500',
        muteEmailsSettingUuid: '1-2-3',
        email: 'test@test.com',
      }),
    ).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: 'test@test.com',
          userIdentifierType: 'email',
        },
        origin: 'syncing-server',
      },
      payload: {
        email: 'test@test.com',
        muteEmailsSettingUuid: '1-2-3',
        allowedSize: '1000',
        attachmentSize: '1500',
      },
      type: 'MAIL_BACKUP_ATTACHMENT_TOO_BIG',
    })
  })

  it('should create an EMAIL_ARCHIVE_EXTENSION_SYNCED event', () => {
    expect(createFactory().createEmailArchiveExtensionSyncedEvent('1-2-3', '2-3-4')).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: '1-2-3',
          userIdentifierType: 'uuid',
        },
        origin: 'syncing-server',
      },
      payload: {
        userUuid: '1-2-3',
        extensionId: '2-3-4',
      },
      type: 'EMAIL_ARCHIVE_EXTENSION_SYNCED',
    })
  })

  it('should create an EMAIL_BACKUP_ATTACHMENT_CREATED event', () => {
    expect(
      createFactory().createEmailBackupAttachmentCreatedEvent({
        backupFileName: 'backup-file',
        email: 'test@test.com',
        backupFileIndex: 1,
        backupFilesTotal: 2,
      }),
    ).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: 'test@test.com',
          userIdentifierType: 'email',
        },
        origin: 'syncing-server',
      },
      payload: {
        backupFileName: 'backup-file',
        email: 'test@test.com',
        backupFileIndex: 1,
        backupFilesTotal: 2,
      },
      type: 'EMAIL_BACKUP_ATTACHMENT_CREATED',
    })
  })

  it('should create a DUPLICATE_ITEM_SYNCED event', () => {
    expect(createFactory().createDuplicateItemSyncedEvent('1-2-3', '2-3-4')).toEqual({
      createdAt: expect.any(Date),
      meta: {
        correlation: {
          userIdentifier: '2-3-4',
          userIdentifierType: 'uuid',
        },
        origin: 'syncing-server',
      },
      payload: {
        itemUuid: '1-2-3',
        userUuid: '2-3-4',
      },
      type: 'DUPLICATE_ITEM_SYNCED',
    })
  })
})
192
packages/syncing-server/src/Domain/Event/DomainEventFactory.ts
Normal file
@ -0,0 +1,192 @@
import {
  DomainEventService,
  DropboxBackupFailedEvent,
  DuplicateItemSyncedEvent,
  EmailArchiveExtensionSyncedEvent,
  EmailBackupAttachmentCreatedEvent,
  GoogleDriveBackupFailedEvent,
  ItemsSyncedEvent,
  MailBackupAttachmentTooBigEvent,
  OneDriveBackupFailedEvent,
  UserRegisteredEvent,
} from '@standardnotes/domain-events'
import { TimerInterface } from '@standardnotes/time'
import { inject, injectable } from 'inversify'
import TYPES from '../../Bootstrap/Types'
import { DomainEventFactoryInterface } from './DomainEventFactoryInterface'

@injectable()
export class DomainEventFactory implements DomainEventFactoryInterface {
  constructor(@inject(TYPES.Timer) private timer: TimerInterface) {}

  createDuplicateItemSyncedEvent(itemUuid: string, userUuid: string): DuplicateItemSyncedEvent {
    return {
      type: 'DUPLICATE_ITEM_SYNCED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: userUuid,
          userIdentifierType: 'uuid',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: {
        itemUuid,
        userUuid,
      },
    }
  }

  createDropboxBackupFailedEvent(muteCloudEmailsSettingUuid: string, email: string): DropboxBackupFailedEvent {
    return {
      type: 'DROPBOX_BACKUP_FAILED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: email,
          userIdentifierType: 'email',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: {
        muteCloudEmailsSettingUuid,
        email,
      },
    }
  }

  createGoogleDriveBackupFailedEvent(muteCloudEmailsSettingUuid: string, email: string): GoogleDriveBackupFailedEvent {
    return {
      type: 'GOOGLE_DRIVE_BACKUP_FAILED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: email,
          userIdentifierType: 'email',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: {
        muteCloudEmailsSettingUuid,
        email,
      },
    }
  }

  createOneDriveBackupFailedEvent(muteCloudEmailsSettingUuid: string, email: string): OneDriveBackupFailedEvent {
    return {
      type: 'ONE_DRIVE_BACKUP_FAILED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: email,
          userIdentifierType: 'email',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: {
        muteCloudEmailsSettingUuid,
        email,
      },
    }
  }

  createMailBackupAttachmentTooBigEvent(dto: {
    allowedSize: string
    attachmentSize: string
    muteEmailsSettingUuid: string
    email: string
  }): MailBackupAttachmentTooBigEvent {
    return {
      type: 'MAIL_BACKUP_ATTACHMENT_TOO_BIG',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: dto.email,
          userIdentifierType: 'email',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: dto,
    }
  }

  createItemsSyncedEvent(dto: {
    userUuid: string
    extensionUrl: string
    extensionId: string
    itemUuids: Array<string>
    forceMute: boolean
    skipFileBackup: boolean
    source: 'account-deletion' | 'realtime-extensions-sync'
  }): ItemsSyncedEvent {
    return {
      type: 'ITEMS_SYNCED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: dto.userUuid,
          userIdentifierType: 'uuid',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: dto,
    }
  }

  createUserRegisteredEvent(userUuid: string, email: string): UserRegisteredEvent {
    return {
      type: 'USER_REGISTERED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: userUuid,
          userIdentifierType: 'uuid',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: {
        userUuid,
        email,
      },
    }
  }

  createEmailArchiveExtensionSyncedEvent(userUuid: string, extensionId: string): EmailArchiveExtensionSyncedEvent {
    return {
      type: 'EMAIL_ARCHIVE_EXTENSION_SYNCED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: userUuid,
          userIdentifierType: 'uuid',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: {
        userUuid,
        extensionId,
      },
    }
  }

  createEmailBackupAttachmentCreatedEvent(dto: {
    backupFileName: string
    backupFileIndex: number
    backupFilesTotal: number
    email: string
  }): EmailBackupAttachmentCreatedEvent {
    return {
      type: 'EMAIL_BACKUP_ATTACHMENT_CREATED',
      createdAt: this.timer.getUTCDate(),
      meta: {
        correlation: {
          userIdentifier: dto.email,
          userIdentifierType: 'email',
        },
        origin: DomainEventService.SyncingServer,
      },
      payload: dto,
    }
  }
}
|
@@ -0,0 +1,41 @@
import {
  DropboxBackupFailedEvent,
  DuplicateItemSyncedEvent,
  EmailArchiveExtensionSyncedEvent,
  EmailBackupAttachmentCreatedEvent,
  GoogleDriveBackupFailedEvent,
  ItemsSyncedEvent,
  MailBackupAttachmentTooBigEvent,
  OneDriveBackupFailedEvent,
  UserRegisteredEvent,
} from '@standardnotes/domain-events'

export interface DomainEventFactoryInterface {
  createUserRegisteredEvent(userUuid: string, email: string): UserRegisteredEvent
  createDropboxBackupFailedEvent(muteCloudEmailsSettingUuid: string, email: string): DropboxBackupFailedEvent
  createGoogleDriveBackupFailedEvent(muteCloudEmailsSettingUuid: string, email: string): GoogleDriveBackupFailedEvent
  createOneDriveBackupFailedEvent(muteCloudEmailsSettingUuid: string, email: string): OneDriveBackupFailedEvent
  createMailBackupAttachmentTooBigEvent(dto: {
    allowedSize: string
    attachmentSize: string
    muteEmailsSettingUuid: string
    email: string
  }): MailBackupAttachmentTooBigEvent
  createItemsSyncedEvent(dto: {
    userUuid: string
    extensionUrl: string
    extensionId: string
    itemUuids: Array<string>
    forceMute: boolean
    skipFileBackup: boolean
    source: 'account-deletion' | 'realtime-extensions-sync'
  }): ItemsSyncedEvent
  createEmailArchiveExtensionSyncedEvent(userUuid: string, extensionId: string): EmailArchiveExtensionSyncedEvent
  createEmailBackupAttachmentCreatedEvent(dto: {
    backupFileName: string
    backupFileIndex: number
    backupFilesTotal: number
    email: string
  }): EmailBackupAttachmentCreatedEvent
  createDuplicateItemSyncedEvent(itemUuid: string, userUuid: string): DuplicateItemSyncedEvent
}
@@ -0,0 +1,5 @@
export enum ExtensionName {
  Dropbox = 'Dropbox',
  GoogleDrive = 'Google Drive',
  OneDrive = 'OneDrive',
}
@@ -0,0 +1,445 @@
import 'reflect-metadata'

import { KeyParamsData } from '@standardnotes/responses'
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { ContentDecoderInterface } from '../Item/ContentDecoderInterface'
import { Item } from '../Item/Item'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ExtensionsHttpService } from './ExtensionsHttpService'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { AxiosInstance } from 'axios'

describe('ExtensionsHttpService', () => {
  let httpClient: AxiosInstance
  let itemRepository: ItemRepositoryInterface
  let contentDecoder: ContentDecoderInterface
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  let item: Item
  let authParams: KeyParamsData
  let logger: Logger

  const createService = () =>
    new ExtensionsHttpService(
      httpClient,
      itemRepository,
      contentDecoder,
      domainEventPublisher,
      domainEventFactory,
      logger,
    )

  beforeEach(() => {
    httpClient = {} as jest.Mocked<AxiosInstance>
    httpClient.request = jest.fn().mockReturnValue({ status: 200, data: { foo: 'bar' } })

    item = {
      content: 'test',
    } as jest.Mocked<Item>

    authParams = {} as jest.Mocked<KeyParamsData>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findByUuidAndUserUuid = jest.fn().mockReturnValue(item)

    logger = {} as jest.Mocked<Logger>
    logger.error = jest.fn()

    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()

    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createDropboxBackupFailedEvent = jest.fn()
    domainEventFactory.createGoogleDriveBackupFailedEvent = jest.fn()
    domainEventFactory.createOneDriveBackupFailedEvent = jest.fn()

    contentDecoder = {} as jest.Mocked<ContentDecoderInterface>
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'Dropbox' })
  })

  it('should trigger cloud backup on extensions server', async () => {
    await createService().triggerCloudBackupOnExtensionsServer({
      userUuid: '1-2-3',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      backupFilename: 'test',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
      cloudProvider: 'DROPBOX',
    })

    expect(httpClient.request).toHaveBeenCalledWith({
      data: {
        auth_params: authParams,
        backup_filename: 'test',
        settings_id: '3-4-5',
        silent: false,
        user_uuid: '1-2-3',
      },
      headers: {
        'Content-Type': 'application/json',
      },
      method: 'POST',
      url: 'https://extensions-server/extension1',
      validateStatus: expect.any(Function),
    })
  })

  it('should publish a failed Dropbox backup event if the cloud backup trigger request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'Dropbox' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().triggerCloudBackupOnExtensionsServer({
      userUuid: '1-2-3',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      backupFilename: 'test',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
      cloudProvider: 'DROPBOX',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createDropboxBackupFailedEvent).toHaveBeenCalled()
  })

  it('should send items to extensions server', async () => {
    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: '',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(httpClient.request).toHaveBeenCalledWith({
      data: {
        auth_params: authParams,
        backup_filename: '',
        items: [item],
        settings_id: '3-4-5',
        silent: false,
        user_uuid: '1-2-3',
      },
      headers: {
        'Content-Type': 'application/json',
      },
      method: 'POST',
      url: 'https://extensions-server/extension1',
      validateStatus: expect.any(Function),
    })
  })

  it('should send only the backup file name to the extensions server when items are not provided', async () => {
    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(httpClient.request).toHaveBeenCalledWith({
      data: {
        auth_params: authParams,
        backup_filename: 'backup-file',
        settings_id: '3-4-5',
        silent: false,
        user_uuid: '1-2-3',
      },
      headers: {
        'Content-Type': 'application/json',
      },
      method: 'POST',
      url: 'https://extensions-server/extension1',
      validateStatus: expect.any(Function),
    })
  })

  it('should publish a failed Dropbox backup event if request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'Dropbox' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createDropboxBackupFailedEvent).toHaveBeenCalled()
  })

  it('should publish a failed Dropbox backup event if request was sent and extensions server responded not ok', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'Dropbox' })

    httpClient.request = jest.fn().mockReturnValue({ status: 400, data: { error: 'foo-bar' } })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createDropboxBackupFailedEvent).toHaveBeenCalled()
  })

  it('should publish a failed Google Drive backup event if request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'Google Drive' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createGoogleDriveBackupFailedEvent).toHaveBeenCalled()
  })

  it('should publish a failed One Drive backup event if request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'OneDrive' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createOneDriveBackupFailedEvent).toHaveBeenCalled()
  })

  it('should not publish a failed backup event if emails are force muted', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ name: 'OneDrive' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: true,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })

  it('should throw an error if the extension to post to is not found', async () => {
    itemRepository.findByUuidAndUserUuid = jest.fn().mockReturnValue(null)

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    let error = null
    try {
      await createService().sendItemsToExtensionsServer({
        userUuid: '1-2-3',
        extensionId: '2-3-4',
        extensionsServerUrl: 'https://extensions-server/extension1',
        forceMute: false,
        items: [item],
        backupFilename: 'backup-file',
        authParams,
        muteEmailsSettingUuid: '3-4-5',
      })
    } catch (e) {
      error = e
    }

    expect(error).not.toBeNull()
  })

  it('should throw an error if the extension to post to has no content', async () => {
    item = {} as jest.Mocked<Item>
    itemRepository.findByUuidAndUserUuid = jest.fn().mockReturnValue(item)

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    let error = null
    try {
      await createService().sendItemsToExtensionsServer({
        userUuid: '1-2-3',
        extensionId: '2-3-4',
        extensionsServerUrl: 'https://extensions-server/extension1',
        forceMute: false,
        items: [item],
        backupFilename: 'backup-file',
        authParams,
        muteEmailsSettingUuid: '3-4-5',
      })
    } catch (e) {
      error = e
    }

    expect(error).not.toBeNull()
  })

  it('should publish a failed Dropbox backup event judging by extension url if request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ url: 'https://dbt.com/...' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createDropboxBackupFailedEvent).toHaveBeenCalled()
  })

  it('should publish a failed Google Drive backup event judging by extension url if request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ url: 'https://gdrive.com/...' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createGoogleDriveBackupFailedEvent).toHaveBeenCalled()
  })

  it('should publish a failed One Drive backup event judging by extension url if request was not sent successfully', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ url: 'https://onedrive.com/...' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    await createService().sendItemsToExtensionsServer({
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      backupFilename: 'backup-file',
      authParams,
      muteEmailsSettingUuid: '3-4-5',
    })

    expect(domainEventPublisher.publish).toHaveBeenCalled()
    expect(domainEventFactory.createOneDriveBackupFailedEvent).toHaveBeenCalled()
  })

  it('should throw an error if cannot deduce extension by judging from the url', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({ url: 'https://foobar.com/...' })

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    let error = null
    try {
      await createService().sendItemsToExtensionsServer({
        userUuid: '1-2-3',
        extensionId: '2-3-4',
        extensionsServerUrl: 'https://extensions-server/extension1',
        forceMute: false,
        items: [item],
        backupFilename: 'backup-file',
        authParams,
        muteEmailsSettingUuid: '3-4-5',
      })
    } catch (e) {
      error = e
    }

    expect(error).not.toBeNull()
  })

  it('should throw an error if there is no extension name or url', async () => {
    contentDecoder.decode = jest.fn().mockReturnValue({})

    httpClient.request = jest.fn().mockImplementation(() => {
      throw new Error('Could not reach the extensions server')
    })

    let error = null
    try {
      await createService().sendItemsToExtensionsServer({
        userUuid: '1-2-3',
        extensionId: '2-3-4',
        extensionsServerUrl: 'https://extensions-server/extension1',
        forceMute: false,
        items: [item],
        backupFilename: 'backup-file',
        authParams,
        muteEmailsSettingUuid: '3-4-5',
      })
    } catch (e) {
      error = e
    }

    expect(error).not.toBeNull()
  })
})
@@ -0,0 +1,171 @@
import { KeyParamsData } from '@standardnotes/responses'
import { DomainEventInterface, DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { AxiosInstance } from 'axios'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../Bootstrap/Types'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { ContentDecoderInterface } from '../Item/ContentDecoderInterface'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ExtensionName } from './ExtensionName'
import { ExtensionsHttpServiceInterface } from './ExtensionsHttpServiceInterface'
import { SendItemsToExtensionsServerDTO } from './SendItemsToExtensionsServerDTO'

@injectable()
export class ExtensionsHttpService implements ExtensionsHttpServiceInterface {
  constructor(
    @inject(TYPES.HTTPClient) private httpClient: AxiosInstance,
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.ContentDecoder) private contentDecoder: ContentDecoderInterface,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async triggerCloudBackupOnExtensionsServer(dto: {
    cloudProvider: 'DROPBOX' | 'GOOGLE_DRIVE' | 'ONE_DRIVE'
    extensionsServerUrl: string
    backupFilename: string
    authParams: KeyParamsData
    forceMute: boolean
    userUuid: string
    muteEmailsSettingUuid: string
  }): Promise<void> {
    let sent = false
    try {
      const payload: Record<string, unknown> = {
        backup_filename: dto.backupFilename,
        auth_params: dto.authParams,
        silent: dto.forceMute,
        user_uuid: dto.userUuid,
        settings_id: dto.muteEmailsSettingUuid,
      }

      const response = await this.httpClient.request({
        method: 'POST',
        url: dto.extensionsServerUrl,
        headers: {
          'Content-Type': 'application/json',
        },
        data: payload,
        validateStatus:
          /* istanbul ignore next */
          (status: number) => status >= 200 && status < 500,
      })

      sent = response.status >= 200 && response.status < 300
    } catch (error) {
      this.logger.error(`[${dto.userUuid}] Failed to send a request to extensions server: ${(error as Error).message}`)
    }

    if (!sent && !dto.forceMute && dto.muteEmailsSettingUuid !== undefined) {
      await this.domainEventPublisher.publish(
        this.createCloudBackupFailedEventBasedOnProvider(
          dto.cloudProvider,
          dto.authParams.identifier as string,
          dto.muteEmailsSettingUuid,
        ),
      )
    }
  }

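  // Same POST contract as the cloud backup trigger above, but with the item
  // payload attached when provided; on failure the backup-failed event is
  // deduced from the extension item's decoded content instead of an explicit
  // provider argument.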
  async sendItemsToExtensionsServer(dto: SendItemsToExtensionsServerDTO): Promise<void> {
    let sent = false
    try {
      const payload: Record<string, unknown> = {
        backup_filename: dto.backupFilename,
        auth_params: dto.authParams,
        silent: dto.forceMute,
        user_uuid: dto.userUuid,
        settings_id: dto.muteEmailsSettingUuid,
      }
      if (dto.items !== undefined) {
        payload.items = dto.items
      }

      const response = await this.httpClient.request({
        method: 'POST',
        url: dto.extensionsServerUrl,
        headers: {
          'Content-Type': 'application/json',
        },
        data: payload,
        validateStatus:
          /* istanbul ignore next */
          (status: number) => status >= 200 && status < 500,
      })

      sent = response.status >= 200 && response.status < 300
    } catch (error) {
      this.logger.error(`[${dto.userUuid}] Failed to send a request to extensions server: ${(error as Error).message}`)
    }

    if (!sent && !dto.forceMute && dto.muteEmailsSettingUuid !== undefined) {
      await this.domainEventPublisher.publish(
        await this.getBackupFailedEvent(
          dto.muteEmailsSettingUuid,
          dto.extensionId,
          dto.userUuid,
          dto.authParams.identifier as string,
        ),
      )
    }
  }

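  // Maps the cloud provider constant onto its typed backup-failed domain event.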
  private createCloudBackupFailedEventBasedOnProvider(
    cloudProvider: 'DROPBOX' | 'GOOGLE_DRIVE' | 'ONE_DRIVE',
    email: string,
    muteCloudEmailsSettingUuid: string,
  ): DomainEventInterface {
    switch (cloudProvider) {
      case 'DROPBOX':
        return this.domainEventFactory.createDropboxBackupFailedEvent(muteCloudEmailsSettingUuid, email)
      case 'GOOGLE_DRIVE':
        return this.domainEventFactory.createGoogleDriveBackupFailedEvent(muteCloudEmailsSettingUuid, email)
      case 'ONE_DRIVE':
        return this.domainEventFactory.createOneDriveBackupFailedEvent(muteCloudEmailsSettingUuid, email)
    }
  }

  private async getBackupFailedEvent(
    muteCloudEmailsSettingUuid: string,
    extensionId: string,
    userUuid: string,
    email: string,
  ): Promise<DomainEventInterface> {
    const extension = await this.itemRepository.findByUuidAndUserUuid(extensionId, userUuid)
    if (extension === null || !extension.content) {
      throw Error(`Could not find extension with id ${extensionId}`)
    }

    const content = this.contentDecoder.decode(extension.content)
    switch (this.getExtensionName(content)) {
      case ExtensionName.Dropbox:
        return this.createCloudBackupFailedEventBasedOnProvider('DROPBOX', email, muteCloudEmailsSettingUuid)
      case ExtensionName.GoogleDrive:
        return this.createCloudBackupFailedEventBasedOnProvider('GOOGLE_DRIVE', email, muteCloudEmailsSettingUuid)
      case ExtensionName.OneDrive:
        return this.createCloudBackupFailedEventBasedOnProvider('ONE_DRIVE', email, muteCloudEmailsSettingUuid)
    }
  }

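  // Deduces the provider from the decoded extension content: prefer the
  // explicit `name` field, otherwise fall back to marker substrings ('dbt',
  // 'gdrive', 'onedrive') in the extension's `url`.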
  private getExtensionName(content: Record<string, unknown>): ExtensionName {
    if ('name' in content) {
      return <ExtensionName>content.name
    }

    const url = 'url' in content ? <string>content.url : undefined

    if (url) {
      if (url.indexOf('dbt') !== -1) {
        return ExtensionName.Dropbox
      } else if (url.indexOf('gdrive') !== -1) {
        return ExtensionName.GoogleDrive
      } else if (url.indexOf('onedrive') !== -1) {
        return ExtensionName.OneDrive
      }
    }

    throw Error('Could not deduce extension name from extension content')
  }
}
@@ -0,0 +1,15 @@
import { KeyParamsData } from '@standardnotes/responses'
import { SendItemsToExtensionsServerDTO } from './SendItemsToExtensionsServerDTO'

export interface ExtensionsHttpServiceInterface {
  triggerCloudBackupOnExtensionsServer(dto: {
    cloudProvider: 'DROPBOX' | 'GOOGLE_DRIVE' | 'ONE_DRIVE'
    extensionsServerUrl: string
    backupFilename: string
    authParams: KeyParamsData
    forceMute: boolean
    userUuid: string
    muteEmailsSettingUuid: string
  }): Promise<void>
  sendItemsToExtensionsServer(dto: SendItemsToExtensionsServerDTO): Promise<void>
}
@@ -0,0 +1,13 @@
import { KeyParamsData } from '@standardnotes/responses'
import { Item } from '../Item/Item'

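// `muteEmailsSettingUuid` and `items` are optional: when `items` is omitted,
// only the backup file name is proxied to the extensions server, and without a
// mute setting uuid no backup-failed event can be published on failure.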
export type SendItemsToExtensionsServerDTO = {
  extensionsServerUrl: string
  extensionId: string
  backupFilename: string
  authParams: KeyParamsData
  forceMute: boolean
  userUuid: string
  muteEmailsSettingUuid?: string
  items?: Array<Item>
}
@@ -0,0 +1,43 @@
import 'reflect-metadata'

import { AccountDeletionRequestedEvent } from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { Item } from '../Item/Item'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { AccountDeletionRequestedEventHandler } from './AccountDeletionRequestedEventHandler'

describe('AccountDeletionRequestedEventHandler', () => {
  let itemRepository: ItemRepositoryInterface
  let logger: Logger
  let event: AccountDeletionRequestedEvent
  let item: Item

  const createHandler = () => new AccountDeletionRequestedEventHandler(itemRepository, logger)

  beforeEach(() => {
    item = {
      uuid: '1-2-3',
      content: 'test',
    } as jest.Mocked<Item>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findAll = jest.fn().mockReturnValue([item])
    itemRepository.deleteByUserUuid = jest.fn()

    logger = {} as jest.Mocked<Logger>
    logger.info = jest.fn()

    event = {} as jest.Mocked<AccountDeletionRequestedEvent>
    event.createdAt = new Date(1)
    event.payload = {
      userUuid: '2-3-4',
      regularSubscriptionUuid: '1-2-3',
    }
  })

  it('should remove all items and revisions for a user', async () => {
    await createHandler().handle(event)

    expect(itemRepository.deleteByUserUuid).toHaveBeenCalledWith('2-3-4')
  })
})
@@ -0,0 +1,19 @@
import { AccountDeletionRequestedEvent, DomainEventHandlerInterface } from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../Bootstrap/Types'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'

@injectable()
export class AccountDeletionRequestedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async handle(event: AccountDeletionRequestedEvent): Promise<void> {
    await this.itemRepository.deleteByUserUuid(event.payload.userUuid)

    this.logger.info(`Finished account cleanup for user: ${event.payload.userUuid}`)
  }
}
@@ -0,0 +1,170 @@
import 'reflect-metadata'

import { CloudBackupRequestedEvent } from '@standardnotes/domain-events'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { Item } from '../Item/Item'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { CloudBackupRequestedEventHandler } from './CloudBackupRequestedEventHandler'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { ExtensionsHttpServiceInterface } from '../Extension/ExtensionsHttpServiceInterface'
import { Logger } from 'winston'

describe('CloudBackupRequestedEventHandler', () => {
  let itemRepository: ItemRepositoryInterface
  let authHttpService: AuthHttpServiceInterface
  let extensionsHttpService: ExtensionsHttpServiceInterface
  let itemBackupService: ItemBackupServiceInterface
  const extensionsServerUrl = 'https://extensions-server'
  let event: CloudBackupRequestedEvent
  let item: Item
  let logger: Logger

  const createHandler = () =>
    new CloudBackupRequestedEventHandler(
      itemRepository,
      authHttpService,
      extensionsHttpService,
      itemBackupService,
      extensionsServerUrl,
      logger,
    )

  beforeEach(() => {
    item = {} as jest.Mocked<Item>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findAll = jest.fn().mockReturnValue([item])

    authHttpService = {} as jest.Mocked<AuthHttpServiceInterface>
    authHttpService.getUserKeyParams = jest.fn().mockReturnValue({ foo: 'bar' })

    extensionsHttpService = {} as jest.Mocked<ExtensionsHttpServiceInterface>
    extensionsHttpService.triggerCloudBackupOnExtensionsServer = jest.fn()

    event = {} as jest.Mocked<CloudBackupRequestedEvent>
    event.createdAt = new Date(1)
    event.payload = {
      cloudProvider: 'DROPBOX',
      cloudProviderToken: 'test-token',
      userUuid: '1-2-3',
      muteEmailsSettingUuid: '2-3-4',
      userHasEmailsMuted: false,
    }

    itemBackupService = {} as jest.Mocked<ItemBackupServiceInterface>
    itemBackupService.backup = jest.fn().mockReturnValue('backup-file-name')

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.warn = jest.fn()
  })

  it('should trigger cloud backup on extensions server - dropbox', async () => {
    await createHandler().handle(event)

    expect(itemRepository.findAll).toHaveBeenCalledWith({
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      deleted: false,
    })

    expect(extensionsHttpService.triggerCloudBackupOnExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      cloudProvider: 'DROPBOX',
      extensionsServerUrl: 'https://extensions-server/dropbox/items/sync?type=sf&dbt=test-token',
      muteEmailsSettingUuid: '2-3-4',
      forceMute: false,
      userUuid: '1-2-3',
    })
  })

  it('should trigger cloud backup on extensions server - google drive', async () => {
    event.payload.cloudProvider = 'GOOGLE_DRIVE'

    await createHandler().handle(event)

    expect(extensionsHttpService.triggerCloudBackupOnExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      cloudProvider: 'GOOGLE_DRIVE',
      extensionsServerUrl: 'https://extensions-server/gdrive/sync?key=test-token',
      muteEmailsSettingUuid: '2-3-4',
      forceMute: false,
      userUuid: '1-2-3',
    })
  })

  it('should trigger cloud backup on extensions server - one drive', async () => {
    event.payload.cloudProvider = 'ONE_DRIVE'

    await createHandler().handle(event)

    expect(extensionsHttpService.triggerCloudBackupOnExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      cloudProvider: 'ONE_DRIVE',
      extensionsServerUrl: 'https://extensions-server/onedrive/sync?type=sf&key=test-token',
      muteEmailsSettingUuid: '2-3-4',
      forceMute: false,
      userUuid: '1-2-3',
    })
  })

  it('should not trigger cloud backup on extensions server - unknown', async () => {
    event.payload.cloudProvider = 'test' as 'DROPBOX' | 'GOOGLE_DRIVE' | 'ONE_DRIVE'

    let expectedError = null
    try {
      await createHandler().handle(event)
    } catch (error) {
      expectedError = error
    }

    expect(extensionsHttpService.triggerCloudBackupOnExtensionsServer).not.toHaveBeenCalled()
    expect(expectedError).not.toBeNull()
  })

  it('should trigger cloud backup on extensions server with muted emails', async () => {
    event.payload.userHasEmailsMuted = true

    await createHandler().handle(event)

    expect(itemRepository.findAll).toHaveBeenCalledWith({
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      deleted: false,
    })

    expect(extensionsHttpService.triggerCloudBackupOnExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      cloudProvider: 'DROPBOX',
      extensionsServerUrl: 'https://extensions-server/dropbox/items/sync?type=sf&dbt=test-token',
      muteEmailsSettingUuid: '2-3-4',
      forceMute: true,
      userUuid: '1-2-3',
    })
  })

  it('should skip triggering cloud backups on extensions server if user key params cannot be obtained', async () => {
    authHttpService.getUserKeyParams = jest.fn().mockImplementation(() => {
      throw new Error('Oops!')
    })

    await createHandler().handle(event)

    expect(extensionsHttpService.triggerCloudBackupOnExtensionsServer).not.toHaveBeenCalled()
  })
})
@@ -0,0 +1,78 @@
import { DomainEventHandlerInterface, CloudBackupRequestedEvent } from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'

import TYPES from '../../Bootstrap/Types'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ItemQuery } from '../Item/ItemQuery'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { Item } from '../Item/Item'
import { ExtensionsHttpServiceInterface } from '../Extension/ExtensionsHttpServiceInterface'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { Logger } from 'winston'
import { KeyParamsData } from '@standardnotes/responses'

@injectable()
export class CloudBackupRequestedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.AuthHttpService) private authHttpService: AuthHttpServiceInterface,
    @inject(TYPES.ExtensionsHttpService) private extensionsHttpService: ExtensionsHttpServiceInterface,
    @inject(TYPES.ItemBackupService) private itemBackupService: ItemBackupServiceInterface,
    @inject(TYPES.EXTENSIONS_SERVER_URL) private extensionsServerUrl: string,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

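  // Flow: load the user's undeleted items, fetch key params from the auth
  // service (skipping the backup entirely if that fails), write a backup file,
  // then ask the extensions server to push it to the chosen cloud provider.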
  async handle(event: CloudBackupRequestedEvent): Promise<void> {
    const items = await this.getItemsForPostingToExtension(event)

    let authParams: KeyParamsData
    try {
      authParams = await this.authHttpService.getUserKeyParams({
        uuid: event.payload.userUuid,
        authenticated: false,
      })
    } catch (error) {
      this.logger.warn(`Could not get user key params from auth service: ${(error as Error).message}`)

      return
    }

    const backupFilename = await this.itemBackupService.backup(items, authParams)

    this.logger.debug(`Sending ${items.length} items to extensions server for user ${event.payload.userUuid}`)

    await this.extensionsHttpService.triggerCloudBackupOnExtensionsServer({
      cloudProvider: event.payload.cloudProvider,
      authParams,
      backupFilename,
      forceMute: event.payload.userHasEmailsMuted,
      muteEmailsSettingUuid: event.payload.muteEmailsSettingUuid,
      extensionsServerUrl: this.getExtensionsServerUrl(event),
      userUuid: event.payload.userUuid,
    })
  }

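  // Maps the cloud provider constant onto the provider-specific endpoint and
  // token query parameter expected by the extensions server.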
  private getExtensionsServerUrl(event: CloudBackupRequestedEvent): string {
    switch (event.payload.cloudProvider) {
      case 'ONE_DRIVE':
        return `${this.extensionsServerUrl}/onedrive/sync?type=sf&key=${event.payload.cloudProviderToken}`
      case 'GOOGLE_DRIVE':
        return `${this.extensionsServerUrl}/gdrive/sync?key=${event.payload.cloudProviderToken}`
      case 'DROPBOX':
        return `${this.extensionsServerUrl}/dropbox/items/sync?type=sf&dbt=${event.payload.cloudProviderToken}`
      default:
        throw new Error(`Unsupported cloud provider ${event.payload.cloudProvider}`)
    }
  }

  private async getItemsForPostingToExtension(event: CloudBackupRequestedEvent): Promise<Item[]> {
    const itemQuery: ItemQuery = {
      userUuid: event.payload.userUuid,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      deleted: false,
    }

    return this.itemRepository.findAll(itemQuery)
  }
}
@@ -0,0 +1,78 @@
import 'reflect-metadata'

import { DuplicateItemSyncedEvent } from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { Item } from '../Item/Item'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { DuplicateItemSyncedEventHandler } from './DuplicateItemSyncedEventHandler'
import { RevisionServiceInterface } from '../Revision/RevisionServiceInterface'

describe('DuplicateItemSyncedEventHandler', () => {
  let itemRepository: ItemRepositoryInterface
  let revisionService: RevisionServiceInterface
  let logger: Logger
  let duplicateItem: Item
  let originalItem: Item
  let event: DuplicateItemSyncedEvent

  const createHandler = () => new DuplicateItemSyncedEventHandler(itemRepository, revisionService, logger)

  beforeEach(() => {
    originalItem = {
      uuid: '1-2-3',
    } as jest.Mocked<Item>

    duplicateItem = {
      uuid: '2-3-4',
      duplicateOf: '1-2-3',
    } as jest.Mocked<Item>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findByUuidAndUserUuid = jest
      .fn()
      .mockReturnValueOnce(duplicateItem)
      .mockReturnValueOnce(originalItem)

    logger = {} as jest.Mocked<Logger>
    logger.warn = jest.fn()

    revisionService = {} as jest.Mocked<RevisionServiceInterface>
    revisionService.copyRevisions = jest.fn()

    event = {} as jest.Mocked<DuplicateItemSyncedEvent>
    event.createdAt = new Date(1)
    event.payload = {
      userUuid: '1-2-3',
      itemUuid: '2-3-4',
    }
  })

  it('should copy revisions from the original item to the duplicate item', async () => {
    await createHandler().handle(event)

    expect(revisionService.copyRevisions).toHaveBeenCalledWith('1-2-3', '2-3-4')
  })

  it('should not copy revisions if the original item does not exist', async () => {
    itemRepository.findByUuidAndUserUuid = jest.fn().mockReturnValueOnce(duplicateItem).mockReturnValueOnce(null)

    await createHandler().handle(event)

    expect(revisionService.copyRevisions).not.toHaveBeenCalled()
  })

  it('should not copy revisions if the duplicate item does not exist', async () => {
    itemRepository.findByUuidAndUserUuid = jest.fn().mockReturnValueOnce(null).mockReturnValueOnce(originalItem)

    await createHandler().handle(event)

    expect(revisionService.copyRevisions).not.toHaveBeenCalled()
  })

  it('should not copy revisions if the duplicate item does not point to an original item', async () => {
    duplicateItem.duplicateOf = null

    await createHandler().handle(event)

    expect(revisionService.copyRevisions).not.toHaveBeenCalled()
  })
})
@@ -0,0 +1,40 @@
import { DomainEventHandlerInterface, DuplicateItemSyncedEvent } from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../Bootstrap/Types'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { RevisionServiceInterface } from '../Revision/RevisionServiceInterface'

@injectable()
export class DuplicateItemSyncedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.RevisionService) private revisionService: RevisionServiceInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

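  // When a client syncs a duplicated item, carry the revision history of the
  // original item over to the duplicate; bail out with a warning if either
  // side is missing.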
  async handle(event: DuplicateItemSyncedEvent): Promise<void> {
    const item = await this.itemRepository.findByUuidAndUserUuid(event.payload.itemUuid, event.payload.userUuid)

    if (item === null) {
      this.logger.warn(`Could not find item with uuid ${event.payload.itemUuid}`)

      return
    }

    if (!item.duplicateOf) {
      this.logger.warn(`Item ${event.payload.itemUuid} does not point to any duplicate`)

      return
    }

    const existingOriginalItem = await this.itemRepository.findByUuidAndUserUuid(
      item.duplicateOf,
      event.payload.userUuid,
    )

    if (existingOriginalItem !== null) {
      await this.revisionService.copyRevisions(existingOriginalItem.uuid, item.uuid)
    }
  }
}
@@ -0,0 +1,132 @@
import 'reflect-metadata'

import {
  DomainEventPublisherInterface,
  EmailArchiveExtensionSyncedEvent,
  EmailBackupAttachmentCreatedEvent,
} from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { Item } from '../Item/Item'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { EmailArchiveExtensionSyncedEventHandler } from './EmailArchiveExtensionSyncedEventHandler'
import { ItemTransferCalculatorInterface } from '../Item/ItemTransferCalculatorInterface'

describe('EmailArchiveExtensionSyncedEventHandler', () => {
  let itemRepository: ItemRepositoryInterface
  let authHttpService: AuthHttpServiceInterface
  let itemBackupService: ItemBackupServiceInterface
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  const emailAttachmentMaxByteSize = 100
  let itemTransferCalculator: ItemTransferCalculatorInterface
  let item: Item
  let event: EmailArchiveExtensionSyncedEvent
  let logger: Logger

  const createHandler = () =>
    new EmailArchiveExtensionSyncedEventHandler(
      itemRepository,
      authHttpService,
      itemBackupService,
      domainEventPublisher,
      domainEventFactory,
      emailAttachmentMaxByteSize,
      itemTransferCalculator,
      logger,
    )

  beforeEach(() => {
    item = {} as jest.Mocked<Item>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findAll = jest.fn().mockReturnValue([item])

    authHttpService = {} as jest.Mocked<AuthHttpServiceInterface>
    authHttpService.getUserKeyParams = jest.fn().mockReturnValue({ identifier: 'test@test.com' })
    authHttpService.getUserSetting = jest.fn().mockReturnValue({ uuid: '3-4-5', value: 'not_muted' })

    event = {} as jest.Mocked<EmailArchiveExtensionSyncedEvent>
    event.createdAt = new Date(1)
    event.payload = {
      userUuid: '1-2-3',
      extensionId: '2-3-4',
    }

    itemBackupService = {} as jest.Mocked<ItemBackupServiceInterface>
    itemBackupService.backup = jest.fn().mockReturnValue('backup-file-name')

    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()

    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createEmailBackupAttachmentCreatedEvent = jest
      .fn()
      .mockReturnValue({} as jest.Mocked<EmailBackupAttachmentCreatedEvent>)

    itemTransferCalculator = {} as jest.Mocked<ItemTransferCalculatorInterface>
    itemTransferCalculator.computeItemUuidBundlesToFetch = jest.fn().mockReturnValue([['1-2-3']])

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.warn = jest.fn()
  })

  it('should inform that backup attachment for email was created', async () => {
    await createHandler().handle(event)

    expect(domainEventPublisher.publish).toHaveBeenCalledTimes(1)
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).toHaveBeenCalledWith({
      backupFileIndex: 1,
      backupFileName: 'backup-file-name',
      backupFilesTotal: 1,
      email: 'test@test.com',
    })
  })

  it('should inform that multipart backup attachment for email was created', async () => {
    itemBackupService.backup = jest
      .fn()
      .mockReturnValueOnce('backup-file-name-1')
      .mockReturnValueOnce('backup-file-name-2')
    itemTransferCalculator.computeItemUuidBundlesToFetch = jest.fn().mockReturnValue([['1-2-3'], ['2-3-4']])

    await createHandler().handle(event)

    expect(domainEventPublisher.publish).toHaveBeenCalledTimes(2)
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).toHaveBeenNthCalledWith(1, {
      backupFileIndex: 1,
      backupFileName: 'backup-file-name-1',
      backupFilesTotal: 2,
      email: 'test@test.com',
    })
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).toHaveBeenNthCalledWith(2, {
      backupFileIndex: 2,
      backupFileName: 'backup-file-name-2',
      backupFilesTotal: 2,
      email: 'test@test.com',
    })
  })

  it('should not inform that backup attachment for email was created if user key params cannot be obtained', async () => {
    authHttpService.getUserKeyParams = jest.fn().mockImplementation(() => {
      throw new Error('Oops!')
    })

    await createHandler().handle(event)

    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).not.toHaveBeenCalled()
  })

  it('should not inform that backup attachment for email was created if backup file name is empty', async () => {
    itemBackupService.backup = jest.fn().mockReturnValue('')

    await createHandler().handle(event)

    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).not.toHaveBeenCalled()
  })
})
@@ -0,0 +1,80 @@
import { KeyParamsData } from '@standardnotes/responses'
import {
  DomainEventHandlerInterface,
  DomainEventPublisherInterface,
  EmailArchiveExtensionSyncedEvent,
} from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../Bootstrap/Types'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ItemQuery } from '../Item/ItemQuery'
import { ItemTransferCalculatorInterface } from '../Item/ItemTransferCalculatorInterface'

@injectable()
export class EmailArchiveExtensionSyncedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.AuthHttpService) private authHttpService: AuthHttpServiceInterface,
    @inject(TYPES.ItemBackupService) private itemBackupService: ItemBackupServiceInterface,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
    @inject(TYPES.EMAIL_ATTACHMENT_MAX_BYTE_SIZE) private emailAttachmentMaxByteSize: number,
    @inject(TYPES.ItemTransferCalculator) private itemTransferCalculator: ItemTransferCalculatorInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async handle(event: EmailArchiveExtensionSyncedEvent): Promise<void> {
    let authParams: KeyParamsData
    try {
      authParams = await this.authHttpService.getUserKeyParams({
        uuid: event.payload.userUuid,
        authenticated: false,
      })
    } catch (error) {
      this.logger.warn(`Could not get user key params from auth service: ${(error as Error).message}`)

      return
    }

    const itemQuery: ItemQuery = {
      userUuid: event.payload.userUuid,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      deleted: false,
    }
    const itemUuidBundles = await this.itemTransferCalculator.computeItemUuidBundlesToFetch(
      itemQuery,
      this.emailAttachmentMaxByteSize,
    )

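    // Each uuid bundle is sized to stay under the configured e-mail attachment
    // limit, so large archives go out as a numbered series of attachments.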
    let bundleIndex = 1
    for (const itemUuidBundle of itemUuidBundles) {
      const items = await this.itemRepository.findAll({
        uuids: itemUuidBundle,
        sortBy: 'updated_at_timestamp',
        sortOrder: 'ASC',
      })

      const backupFileName = await this.itemBackupService.backup(items, authParams)

      this.logger.debug(`Data backed up into: ${backupFileName}`)

      if (backupFileName.length !== 0) {
        this.logger.debug('Publishing EMAIL_BACKUP_ATTACHMENT_CREATED event')

        await this.domainEventPublisher.publish(
          this.domainEventFactory.createEmailBackupAttachmentCreatedEvent({
            backupFileName,
            backupFileIndex: bundleIndex++,
            backupFilesTotal: itemUuidBundles.length,
            email: authParams.identifier as string,
          }),
        )
      }
    }
  }
}
@@ -0,0 +1,136 @@
import 'reflect-metadata'
|
||||
|
||||
import {
|
||||
DomainEventPublisherInterface,
|
||||
EmailBackupRequestedEvent,
|
||||
EmailBackupAttachmentCreatedEvent,
|
||||
MailBackupAttachmentTooBigEvent,
|
||||
} from '@standardnotes/domain-events'
|
||||
import { Logger } from 'winston'
|
||||
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { Item } from '../Item/Item'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { EmailBackupRequestedEventHandler } from './EmailBackupRequestedEventHandler'
import { ItemTransferCalculatorInterface } from '../Item/ItemTransferCalculatorInterface'

describe('EmailBackupRequestedEventHandler', () => {
  let itemRepository: ItemRepositoryInterface
  let authHttpService: AuthHttpServiceInterface
  let itemBackupService: ItemBackupServiceInterface
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  const emailAttachmentMaxByteSize = 100
  let itemTransferCalculator: ItemTransferCalculatorInterface
  let item: Item
  let event: EmailBackupRequestedEvent
  let logger: Logger

  const createHandler = () =>
    new EmailBackupRequestedEventHandler(
      itemRepository,
      authHttpService,
      itemBackupService,
      domainEventPublisher,
      domainEventFactory,
      emailAttachmentMaxByteSize,
      itemTransferCalculator,
      logger,
    )

  beforeEach(() => {
    item = {} as jest.Mocked<Item>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findAll = jest.fn().mockReturnValue([item])

    authHttpService = {} as jest.Mocked<AuthHttpServiceInterface>
    authHttpService.getUserKeyParams = jest.fn().mockReturnValue({ identifier: 'test@test.com' })

    event = {} as jest.Mocked<EmailBackupRequestedEvent>
    event.createdAt = new Date(1)
    event.payload = {
      userUuid: '1-2-3',
      userHasEmailsMuted: false,
      muteEmailsSettingUuid: '1-2-3',
    }

    itemBackupService = {} as jest.Mocked<ItemBackupServiceInterface>
    itemBackupService.backup = jest.fn().mockReturnValue('backup-file-name')

    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()

    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createEmailBackupAttachmentCreatedEvent = jest
      .fn()
      .mockReturnValue({} as jest.Mocked<EmailBackupAttachmentCreatedEvent>)
    domainEventFactory.createMailBackupAttachmentTooBigEvent = jest
      .fn()
      .mockReturnValue({} as jest.Mocked<MailBackupAttachmentTooBigEvent>)

    itemTransferCalculator = {} as jest.Mocked<ItemTransferCalculatorInterface>
    itemTransferCalculator.computeItemUuidBundlesToFetch = jest.fn().mockReturnValue([['1-2-3']])

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.warn = jest.fn()
  })

  it('should inform that backup attachment for email was created', async () => {
    await createHandler().handle(event)

    expect(domainEventPublisher.publish).toHaveBeenCalledTimes(1)
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).toHaveBeenCalledWith({
      backupFileIndex: 1,
      backupFileName: 'backup-file-name',
      backupFilesTotal: 1,
      email: 'test@test.com',
    })
  })

  it('should inform that multipart backup attachment for email was created', async () => {
    itemBackupService.backup = jest
      .fn()
      .mockReturnValueOnce('backup-file-name-1')
      .mockReturnValueOnce('backup-file-name-2')
    itemTransferCalculator.computeItemUuidBundlesToFetch = jest.fn().mockReturnValue([['1-2-3'], ['2-3-4']])

    await createHandler().handle(event)

    expect(domainEventPublisher.publish).toHaveBeenCalledTimes(2)
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).toHaveBeenNthCalledWith(1, {
      backupFileIndex: 1,
      backupFileName: 'backup-file-name-1',
      backupFilesTotal: 2,
      email: 'test@test.com',
    })
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).toHaveBeenNthCalledWith(2, {
      backupFileIndex: 2,
      backupFileName: 'backup-file-name-2',
      backupFilesTotal: 2,
      email: 'test@test.com',
    })
  })

  it('should not inform that backup attachment for email was created if user key params cannot be obtained', async () => {
    authHttpService.getUserKeyParams = jest.fn().mockImplementation(() => {
      throw new Error('Oops!')
    })

    await createHandler().handle(event)

    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).not.toHaveBeenCalled()
  })

  it('should not inform that backup attachment for email was created if backup file name is empty', async () => {
    itemBackupService.backup = jest.fn().mockReturnValue('')

    await createHandler().handle(event)

    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
    expect(domainEventFactory.createEmailBackupAttachmentCreatedEvent).not.toHaveBeenCalled()
  })
})
@@ -0,0 +1,80 @@
import { KeyParamsData } from '@standardnotes/responses'
import {
  DomainEventHandlerInterface,
  DomainEventPublisherInterface,
  EmailBackupRequestedEvent,
} from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../Bootstrap/Types'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ItemTransferCalculatorInterface } from '../Item/ItemTransferCalculatorInterface'
import { ItemQuery } from '../Item/ItemQuery'

@injectable()
export class EmailBackupRequestedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.AuthHttpService) private authHttpService: AuthHttpServiceInterface,
    @inject(TYPES.ItemBackupService) private itemBackupService: ItemBackupServiceInterface,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
    @inject(TYPES.EMAIL_ATTACHMENT_MAX_BYTE_SIZE) private emailAttachmentMaxByteSize: number,
    @inject(TYPES.ItemTransferCalculator) private itemTransferCalculator: ItemTransferCalculatorInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async handle(event: EmailBackupRequestedEvent): Promise<void> {
    let authParams: KeyParamsData
    try {
      authParams = await this.authHttpService.getUserKeyParams({
        uuid: event.payload.userUuid,
        authenticated: false,
      })
    } catch (error) {
      this.logger.warn(`Could not get user key params from auth service: ${(error as Error).message}`)

      return
    }

    const itemQuery: ItemQuery = {
      userUuid: event.payload.userUuid,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      deleted: false,
    }
    const itemUuidBundles = await this.itemTransferCalculator.computeItemUuidBundlesToFetch(
      itemQuery,
      this.emailAttachmentMaxByteSize,
    )

    let bundleIndex = 1
    for (const itemUuidBundle of itemUuidBundles) {
      const items = await this.itemRepository.findAll({
        uuids: itemUuidBundle,
        sortBy: 'updated_at_timestamp',
        sortOrder: 'ASC',
      })

      const backupFileName = await this.itemBackupService.backup(items, authParams)

      this.logger.debug(`Data backed up into: ${backupFileName}`)

      if (backupFileName.length !== 0) {
        this.logger.debug('Publishing EMAIL_BACKUP_ATTACHMENT_CREATED event')

        await this.domainEventPublisher.publish(
          this.domainEventFactory.createEmailBackupAttachmentCreatedEvent({
            backupFileName,
            backupFileIndex: bundleIndex++,
            backupFilesTotal: itemUuidBundles.length,
            email: authParams.identifier as string,
          }),
        )
      }
    }
  }
}
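The handler above fans a single backup request out into one attachment event per uuid bundle, so an export that exceeds emailAttachmentMaxByteSize arrives as several emails. A minimal driver sketch, constructing the handler directly the way the spec above does; the eight collaborators and the event instance are assumed to exist and are not part of this diff:

// Hypothetical wiring: collaborators constructed elsewhere; the payload shape
// mirrors the fixture in the spec above.
const handler = new EmailBackupRequestedEventHandler(
  itemRepository,
  authHttpService,
  itemBackupService,
  domainEventPublisher,
  domainEventFactory,
  100, // emailAttachmentMaxByteSize
  itemTransferCalculator,
  logger,
)
await handler.handle(event)
// One EMAIL_BACKUP_ATTACHMENT_CREATED event is published per uuid bundle, with
// backupFileIndex counting up from 1 and backupFilesTotal fixed at the bundle count.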
@@ -0,0 +1,162 @@
import 'reflect-metadata'

import { ItemsSyncedEvent } from '@standardnotes/domain-events'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { Item } from '../Item/Item'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ItemsSyncedEventHandler } from './ItemsSyncedEventHandler'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { ExtensionsHttpServiceInterface } from '../Extension/ExtensionsHttpServiceInterface'
import { Logger } from 'winston'

describe('ItemsSyncedEventHandler', () => {
  let itemRepository: ItemRepositoryInterface
  let authHttpService: AuthHttpServiceInterface
  let extensionsHttpService: ExtensionsHttpServiceInterface
  let itemBackupService: ItemBackupServiceInterface
  let internalDNSRerouteEnabled = false
  const extensionsServerUrl = 'https://extensions-server'
  let event: ItemsSyncedEvent
  let item: Item
  let logger: Logger

  const createHandler = () =>
    new ItemsSyncedEventHandler(
      itemRepository,
      authHttpService,
      extensionsHttpService,
      itemBackupService,
      internalDNSRerouteEnabled,
      extensionsServerUrl,
      logger,
    )

  beforeEach(() => {
    item = {} as jest.Mocked<Item>

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findAll = jest.fn().mockReturnValue([item])

    authHttpService = {} as jest.Mocked<AuthHttpServiceInterface>
    authHttpService.getUserKeyParams = jest.fn().mockReturnValue({ foo: 'bar' })

    extensionsHttpService = {} as jest.Mocked<ExtensionsHttpServiceInterface>
    extensionsHttpService.sendItemsToExtensionsServer = jest.fn()

    event = {} as jest.Mocked<ItemsSyncedEvent>
    event.createdAt = new Date(1)
    event.payload = {
      userUuid: '1-2-3',
      extensionId: '2-3-4',
      extensionUrl: 'https://extensions-server/extension1',
      forceMute: false,
      itemUuids: ['4-5-6'],
      skipFileBackup: false,
      source: 'realtime-extensions-sync',
    }

    itemBackupService = {} as jest.Mocked<ItemBackupServiceInterface>
    itemBackupService.backup = jest.fn().mockReturnValue('backup-file-name')

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.warn = jest.fn()
  })

  it('should send synced items to extensions server', async () => {
    await createHandler().handle(event)

    expect(itemRepository.findAll).toHaveBeenCalledWith({
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      uuids: ['4-5-6'],
    })

    expect(extensionsHttpService.sendItemsToExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      userUuid: '1-2-3',
    })
  })

  it('should skip sending synced items to extensions server if user key params cannot be obtained', async () => {
    authHttpService.getUserKeyParams = jest.fn().mockImplementation(() => {
      throw new Error('Oops!')
    })

    await createHandler().handle(event)

    expect(extensionsHttpService.sendItemsToExtensionsServer).not.toHaveBeenCalled()
  })

  it('should send synced items to extensions server with skipped file backup', async () => {
    event.payload.skipFileBackup = true
    await createHandler().handle(event)

    expect(itemRepository.findAll).toHaveBeenCalledWith({
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      uuids: ['4-5-6'],
    })

    expect(extensionsHttpService.sendItemsToExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: '',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      items: [item],
      userUuid: '1-2-3',
    })
  })

  it('should send all undeleted items to extensions server if none specified', async () => {
    event.payload.itemUuids = []

    await createHandler().handle(event)

    expect(itemRepository.findAll).toHaveBeenCalledWith({
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      deleted: false,
    })

    expect(extensionsHttpService.sendItemsToExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension1',
      forceMute: false,
      userUuid: '1-2-3',
    })
  })

  it('should replace the Standard Notes extensions server url with internal URL if internal DNS reroute is enabled', async () => {
    internalDNSRerouteEnabled = true
    event.payload.extensionUrl = 'https://extensions.standardnotes.org/extension2'

    await createHandler().handle(event)

    expect(extensionsHttpService.sendItemsToExtensionsServer).toHaveBeenCalledWith({
      authParams: {
        foo: 'bar',
      },
      backupFilename: 'backup-file-name',
      extensionId: '2-3-4',
      extensionsServerUrl: 'https://extensions-server/extension2',
      forceMute: false,
      userUuid: '1-2-3',
    })
  })
})
@@ -0,0 +1,82 @@
import { DomainEventHandlerInterface, ItemsSyncedEvent } from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'

import TYPES from '../../Bootstrap/Types'
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
import { ItemQuery } from '../Item/ItemQuery'
import { AuthHttpServiceInterface } from '../Auth/AuthHttpServiceInterface'
import { Item } from '../Item/Item'
import { ExtensionsHttpServiceInterface } from '../Extension/ExtensionsHttpServiceInterface'
import { ItemBackupServiceInterface } from '../Item/ItemBackupServiceInterface'
import { Logger } from 'winston'
import { KeyParamsData } from '@standardnotes/responses'

@injectable()
export class ItemsSyncedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.AuthHttpService) private authHttpService: AuthHttpServiceInterface,
    @inject(TYPES.ExtensionsHttpService) private extensionsHttpService: ExtensionsHttpServiceInterface,
    @inject(TYPES.ItemBackupService) private itemBackupService: ItemBackupServiceInterface,
    @inject(TYPES.INTERNAL_DNS_REROUTE_ENABLED) private internalDNSRerouteEnabled: boolean,
    @inject(TYPES.EXTENSIONS_SERVER_URL) private extensionsServerUrl: string,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async handle(event: ItemsSyncedEvent): Promise<void> {
    const items = await this.getItemsForPostingToExtension(event)

    let authParams: KeyParamsData
    try {
      authParams = await this.authHttpService.getUserKeyParams({
        uuid: event.payload.userUuid,
        authenticated: false,
      })
    } catch (error) {
      this.logger.warn(`Could not get user key params from auth service: ${(error as Error).message}`)

      return
    }

    let backupFilename = ''
    if (!event.payload.skipFileBackup) {
      backupFilename = await this.itemBackupService.backup(items, authParams)
    }
    const backingUpViaProxyFile = backupFilename !== ''

    this.logger.debug(`Sending ${items.length} items to extensions server for user ${event.payload.userUuid}`)

    await this.extensionsHttpService.sendItemsToExtensionsServer({
      items: backingUpViaProxyFile ? undefined : items,
      authParams,
      backupFilename,
      forceMute: event.payload.forceMute,
      extensionsServerUrl: this.getExtensionsServerUrl(event),
      userUuid: event.payload.userUuid,
      extensionId: event.payload.extensionId,
    })
  }

  private getExtensionsServerUrl(event: ItemsSyncedEvent): string {
    if (this.internalDNSRerouteEnabled) {
      return event.payload.extensionUrl.replace('https://extensions.standardnotes.org', this.extensionsServerUrl)
    }

    return event.payload.extensionUrl
  }

  private async getItemsForPostingToExtension(event: ItemsSyncedEvent): Promise<Item[]> {
    const itemQuery: ItemQuery = {
      userUuid: event.payload.userUuid,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    }
    if (event.payload.itemUuids.length) {
      itemQuery.uuids = event.payload.itemUuids
    } else {
      itemQuery.deleted = false
    }

    return this.itemRepository.findAll(itemQuery)
  }
}
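When internal DNS rerouting is enabled, only the host prefix https://extensions.standardnotes.org is swapped for the configured internal URL; the extension-specific path survives the rewrite. A quick check of that behavior, using plain String.replace and the values from the spec above:

// Sketch of the rewrite performed by the private getExtensionsServerUrl helper.
const extensionUrl = 'https://extensions.standardnotes.org/extension2'
const internalUrl = extensionUrl.replace('https://extensions.standardnotes.org', 'https://extensions-server')
console.log(internalUrl) // https://extensions-server/extension2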
@@ -0,0 +1,29 @@
import 'reflect-metadata'

import { ContentDecoder } from './ContentDecoder'

describe('ContentDecoder', () => {
  const createDecoder = () => new ContentDecoder()

  it('should decode content', () => {
    const content = '000eyJmb28iOiJiYXIifQ=='

    expect(createDecoder().decode(content)).toEqual({
      foo: 'bar',
    })
  })

  it('should encode content', () => {
    expect(
      createDecoder().encode({
        foo: 'bar',
      }),
    ).toEqual('000eyJmb28iOiJiYXIifQ==')
  })

  it('should return empty object on decoding failure', () => {
    const content = '032400eyJmb28iOiJiYXIifQ=='

    expect(createDecoder().decode(content)).toEqual({})
  })
})
22
packages/syncing-server/src/Domain/Item/ContentDecoder.ts
Normal file
@@ -0,0 +1,22 @@
import { injectable } from 'inversify'
import { ContentDecoderInterface } from './ContentDecoderInterface'

@injectable()
export class ContentDecoder implements ContentDecoderInterface {
  decode(content: string): Record<string, unknown> {
    try {
      const contentBuffer = Buffer.from(content.substring(3), 'base64')
      const decodedContent = contentBuffer.toString()

      return JSON.parse(decodedContent)
    } catch (error) {
      return {}
    }
  }

  encode(content: Record<string, unknown>): string | undefined {
    const stringifiedContent = JSON.stringify(content)

    return `000${Buffer.from(stringifiedContent).toString('base64')}`
  }
}
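The 000 prefix is a three-character version marker: encode prepends it, and decode drops the first three characters before base64-decoding, so any input whose remainder does not decode to valid JSON falls into the catch branch and yields an empty object. A round-trip sketch using plain Node Buffer semantics:

// Round-trip through the decoder; the failure input mirrors the spec above,
// where the extra leading digits leave bytes after substring(3) that are not JSON.
const decoder = new ContentDecoder()
const encoded = decoder.encode({ foo: 'bar' }) // '000eyJmb28iOiJiYXIifQ=='
console.log(decoder.decode(encoded as string)) // { foo: 'bar' }
console.log(decoder.decode('032400eyJmb28iOiJiYXIifQ==')) // {}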
@@ -0,0 +1,4 @@
export interface ContentDecoderInterface {
  decode(content: string): Record<string, unknown>
  encode(content: Record<string, unknown>): string | undefined
}
@@ -0,0 +1,6 @@
import { ContentType } from '@standardnotes/common'
import { IntegrityPayload } from '@standardnotes/payloads'

export type ExtendedIntegrityPayload = IntegrityPayload & {
  content_type: ContentType
}
7
packages/syncing-server/src/Domain/Item/GetItemsDTO.ts
Normal file
@@ -0,0 +1,7 @@
export type GetItemsDTO = {
  userUuid: string
  syncToken?: string | null
  cursorToken?: string | null
  limit?: number
  contentType?: string
}
@@ -0,0 +1,6 @@
import { Item } from './Item'

export type GetItemsResult = {
  items: Array<Item>
  cursorToken?: string
}
128
packages/syncing-server/src/Domain/Item/Item.ts
Normal file
@@ -0,0 +1,128 @@
import { ContentType, Uuid } from '@standardnotes/common'
import { Column, Entity, Index, OneToMany, PrimaryGeneratedColumn } from 'typeorm'
import { Revision } from '../Revision/Revision'

@Entity({ name: 'items' })
@Index('index_items_on_user_uuid_and_content_type', ['userUuid', 'contentType'])
@Index('user_uuid_and_updated_at_timestamp_and_created_at_timestamp', [
  'userUuid',
  'updatedAtTimestamp',
  'createdAtTimestamp',
])
@Index('user_uuid_and_deleted', ['userUuid', 'deleted'])
export class Item {
  @PrimaryGeneratedColumn('uuid')
  declare uuid: string

  @Column({
    type: 'varchar',
    name: 'duplicate_of',
    length: 36,
    nullable: true,
  })
  declare duplicateOf: string | null

  @Column({
    type: 'varchar',
    name: 'items_key_id',
    length: 255,
    nullable: true,
  })
  declare itemsKeyId: string | null

  @Column({
    type: 'mediumtext',
    nullable: true,
  })
  declare content: string | null

  @Column({
    name: 'content_type',
    type: 'varchar',
    length: 255,
    nullable: true,
  })
  @Index('index_items_on_content_type')
  declare contentType: ContentType | null

  @Column({
    name: 'content_size',
    type: 'int',
    nullable: true,
  })
  declare contentSize: number | null

  @Column({
    name: 'enc_item_key',
    type: 'text',
    nullable: true,
  })
  declare encItemKey: string | null

  @Column({
    name: 'auth_hash',
    type: 'varchar',
    length: 255,
    nullable: true,
  })
  declare authHash: string | null

  @Column({
    name: 'user_uuid',
    length: 36,
  })
  @Index('index_items_on_user_uuid')
  declare userUuid: string

  @Column({
    type: 'tinyint',
    precision: 1,
    nullable: true,
    default: 0,
  })
  @Index('index_items_on_deleted')
  declare deleted: boolean

  @Column({
    name: 'created_at',
    type: 'datetime',
    precision: 6,
  })
  declare createdAt: Date

  @Column({
    name: 'updated_at',
    type: 'datetime',
    precision: 6,
  })
  declare updatedAt: Date

  @Column({
    name: 'created_at_timestamp',
    type: 'bigint',
  })
  declare createdAtTimestamp: number

  @Column({
    name: 'updated_at_timestamp',
    type: 'bigint',
  })
  @Index('updated_at_timestamp')
  declare updatedAtTimestamp: number

  @OneToMany(
    /* istanbul ignore next */
    () => Revision,
    /* istanbul ignore next */
    (revision) => revision.item,
  )
  declare revisions: Promise<Revision[]>

  @Column({
    name: 'updated_with_session',
    type: 'varchar',
    length: 36,
    nullable: true,
  })
  declare updatedWithSession: Uuid | null
}
@@ -0,0 +1,6 @@
import { KeyParamsData } from '@standardnotes/responses'
import { Item } from './Item'

export interface ItemBackupServiceInterface {
  backup(items: Array<Item>, authParams: KeyParamsData): Promise<string>
}
9
packages/syncing-server/src/Domain/Item/ItemConflict.ts
Normal file
@@ -0,0 +1,9 @@
import { ConflictType } from '@standardnotes/responses'
import { Item } from './Item'
import { ItemHash } from './ItemHash'

export type ItemConflict = {
  serverItem?: Item
  unsavedItem?: ItemHash
  type: ConflictType
}
177
packages/syncing-server/src/Domain/Item/ItemFactory.spec.ts
Normal file
@@ -0,0 +1,177 @@
import 'reflect-metadata'

import { Timer, TimerInterface } from '@standardnotes/time'
import { ContentType } from '@standardnotes/common'

import { ItemFactory } from './ItemFactory'
import { ItemHash } from './ItemHash'

describe('ItemFactory', () => {
  let timer: TimerInterface
  let timeHelper: Timer

  const createFactory = () => new ItemFactory(timer)

  beforeEach(() => {
    timeHelper = new Timer()

    timer = {} as jest.Mocked<TimerInterface>
    timer.getTimestampInMicroseconds = jest.fn().mockReturnValue(1616164633241568)
    timer.convertMicrosecondsToDate = jest
      .fn()
      .mockImplementation((microseconds: number) => timeHelper.convertMicrosecondsToDate(microseconds))
    timer.convertStringDateToMicroseconds = jest
      .fn()
      .mockImplementation((date: string) => timeHelper.convertStringDateToMicroseconds(date))
    timer.convertStringDateToDate = jest
      .fn()
      .mockImplementation((date: string) => timeHelper.convertStringDateToDate(date))
  })

  it('should create an item based on item hash', () => {
    const itemHash = {
      uuid: '1-2-3',
    } as jest.Mocked<ItemHash>

    const item = createFactory().create({ userUuid: 'a-b-c', itemHash, sessionUuid: '1-2-3' })

    expect(item).toEqual({
      createdAtTimestamp: 1616164633241568,
      createdAt: expect.any(Date),
      updatedWithSession: '1-2-3',
      updatedAt: expect.any(Date),
      updatedAtTimestamp: 1616164633241568,
      userUuid: 'a-b-c',
      uuid: '1-2-3',
      contentSize: 0,
    })
  })

  it('should create a stub item based on item hash with updated_at date and timestamps overwritten', () => {
    const itemHash = {
      uuid: '1-2-3',
      updated_at: '2021-03-25T09:37:37.943Z',
    } as jest.Mocked<ItemHash>

    const item = createFactory().createStub({ userUuid: 'a-b-c', itemHash, sessionUuid: '1-2-3' })

    expect(item).toEqual({
      createdAtTimestamp: 1616164633241568,
      createdAt: expect.any(Date),
      updatedWithSession: '1-2-3',
      updatedAt: new Date('2021-03-25T09:37:37.943Z'),
      updatedAtTimestamp: 1616665057943000,
      userUuid: 'a-b-c',
      uuid: '1-2-3',
      content: null,
      contentSize: 0,
    })
  })

  it('should create a stub item based on item hash with updated_at_timestamp and timestamps overwritten', () => {
    const itemHash = {
      uuid: '1-2-3',
      updated_at_timestamp: 1616164633241568,
      content: 'foobar',
    } as jest.Mocked<ItemHash>

    const item = createFactory().createStub({ userUuid: 'a-b-c', itemHash, sessionUuid: '1-2-3' })

    expect(item).toEqual({
      createdAtTimestamp: 1616164633241568,
      createdAt: expect.any(Date),
      updatedWithSession: '1-2-3',
      updatedAt: new Date('2021-03-19T14:37:13.241Z'),
      updatedAtTimestamp: 1616164633241568,
      userUuid: 'a-b-c',
      uuid: '1-2-3',
      content: 'foobar',
      contentSize: 6,
    })
  })

  it('should create a stub item based on item hash without updated timestamps', () => {
    const itemHash = {
      uuid: '1-2-3',
    } as jest.Mocked<ItemHash>

    const item = createFactory().createStub({ userUuid: 'a-b-c', itemHash, sessionUuid: '1-2-3' })

    expect(item).toEqual({
      createdAtTimestamp: 1616164633241568,
      createdAt: expect.any(Date),
      updatedWithSession: '1-2-3',
      updatedAt: expect.any(Date),
      updatedAtTimestamp: 1616164633241568,
      userUuid: 'a-b-c',
      uuid: '1-2-3',
      content: null,
      contentSize: 0,
    })
  })

  it('should create an item based on item hash with all fields filled', () => {
    const itemHash = {
      uuid: '1-2-3',
      content: 'asdqwe1',
      content_type: ContentType.Note,
      duplicate_of: '222',
      auth_hash: 'aaa',
      deleted: true,
      enc_item_key: 'qweqwe1',
      items_key_id: 'asdasd1',
      created_at: timeHelper.formatDate(new Date(1616164633241), 'YYYY-MM-DDTHH:mm:ss.SSS[Z]'),
      updated_at: timeHelper.formatDate(new Date(1616164633242), 'YYYY-MM-DDTHH:mm:ss.SSS[Z]'),
    } as jest.Mocked<ItemHash>

    const item = createFactory().create({ userUuid: 'a-b-c', itemHash, sessionUuid: '1-2-3' })

    expect(item).toEqual({
      content: 'asdqwe1',
      contentSize: 7,
      contentType: 'Note',
      createdAt: expect.any(Date),
      updatedWithSession: '1-2-3',
      createdAtTimestamp: 1616164633241000,
      encItemKey: 'qweqwe1',
      itemsKeyId: 'asdasd1',
      authHash: 'aaa',
      deleted: true,
      duplicateOf: '222',
      updatedAt: expect.any(Date),
      updatedAtTimestamp: 1616164633241568,
      userUuid: 'a-b-c',
      uuid: '1-2-3',
    })
  })

  it('should create an item based on item hash with created at timestamp', () => {
    const itemHash = {
      uuid: '1-2-3',
      content: 'asdqwe1',
      content_type: ContentType.Note,
      duplicate_of: null,
      enc_item_key: 'qweqwe1',
      items_key_id: 'asdasd1',
      created_at_timestamp: 1616164633241312,
      updated_at: timeHelper.formatDate(new Date(1616164633242), 'YYYY-MM-DDTHH:mm:ss.SSS[Z]'),
    } as jest.Mocked<ItemHash>

    const item = createFactory().create({ userUuid: 'a-b-c', itemHash, sessionUuid: '1-2-3' })

    expect(item).toEqual({
      content: 'asdqwe1',
      contentSize: 7,
      contentType: 'Note',
      createdAt: expect.any(Date),
      updatedWithSession: '1-2-3',
      createdAtTimestamp: 1616164633241312,
      encItemKey: 'qweqwe1',
      itemsKeyId: 'asdasd1',
      updatedAt: expect.any(Date),
      updatedAtTimestamp: 1616164633241568,
      userUuid: 'a-b-c',
      uuid: '1-2-3',
    })
  })
})
80
packages/syncing-server/src/Domain/Item/ItemFactory.ts
Normal file
@@ -0,0 +1,80 @@
import { Uuid } from '@standardnotes/common'
import { TimerInterface } from '@standardnotes/time'
import { inject, injectable } from 'inversify'

import TYPES from '../../Bootstrap/Types'
import { Item } from './Item'
import { ItemFactoryInterface } from './ItemFactoryInterface'
import { ItemHash } from './ItemHash'

@injectable()
export class ItemFactory implements ItemFactoryInterface {
  constructor(@inject(TYPES.Timer) private timer: TimerInterface) {}

  createStub(dto: { userUuid: string; itemHash: ItemHash; sessionUuid: Uuid | null }): Item {
    const item = this.create(dto)

    if (dto.itemHash.content === undefined) {
      item.content = null
    }

    if (dto.itemHash.updated_at_timestamp) {
      item.updatedAtTimestamp = dto.itemHash.updated_at_timestamp
      item.updatedAt = this.timer.convertMicrosecondsToDate(dto.itemHash.updated_at_timestamp)
    } else if (dto.itemHash.updated_at) {
      item.updatedAtTimestamp = this.timer.convertStringDateToMicroseconds(dto.itemHash.updated_at)
      item.updatedAt = this.timer.convertStringDateToDate(dto.itemHash.updated_at)
    }

    return item
  }

  create(dto: { userUuid: string; itemHash: ItemHash; sessionUuid: Uuid | null }): Item {
    const newItem = new Item()
    newItem.uuid = dto.itemHash.uuid
    newItem.updatedWithSession = dto.sessionUuid
    newItem.contentSize = 0
    if (dto.itemHash.content) {
      newItem.content = dto.itemHash.content
      newItem.contentSize = Buffer.byteLength(dto.itemHash.content)
    }
    newItem.userUuid = dto.userUuid
    if (dto.itemHash.content_type) {
      newItem.contentType = dto.itemHash.content_type
    }
    if (dto.itemHash.enc_item_key) {
      newItem.encItemKey = dto.itemHash.enc_item_key
    }
    if (dto.itemHash.items_key_id) {
      newItem.itemsKeyId = dto.itemHash.items_key_id
    }
    if (dto.itemHash.duplicate_of) {
      newItem.duplicateOf = dto.itemHash.duplicate_of
    }
    if (dto.itemHash.deleted !== undefined) {
      newItem.deleted = dto.itemHash.deleted
    }
    if (dto.itemHash.auth_hash) {
      newItem.authHash = dto.itemHash.auth_hash
    }

    const now = this.timer.getTimestampInMicroseconds()
    const nowDate = this.timer.convertMicrosecondsToDate(now)

    newItem.updatedAtTimestamp = now
    newItem.updatedAt = nowDate

    newItem.createdAtTimestamp = now
    newItem.createdAt = nowDate

    if (dto.itemHash.created_at_timestamp) {
      newItem.createdAtTimestamp = dto.itemHash.created_at_timestamp
      newItem.createdAt = this.timer.convertMicrosecondsToDate(dto.itemHash.created_at_timestamp)
    } else if (dto.itemHash.created_at) {
      newItem.createdAtTimestamp = this.timer.convertStringDateToMicroseconds(dto.itemHash.created_at)
      newItem.createdAt = this.timer.convertStringDateToDate(dto.itemHash.created_at)
    }

    return newItem
  }
}
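Both create and createStub resolve timestamps with the same precedence: an explicit microsecond timestamp on the hash wins, a string date comes second, and the factory's current time is the fallback; createStub additionally lets the hash's own updated_at values overwrite the server-assigned ones. A small usage sketch, with the real Timer from @standardnotes/time as in the spec above (the imports are assumed from the surrounding files):

// Sketch: created_at_timestamp takes precedence over created_at; with neither
// present, the factory's "now" stays in place for both created and updated fields.
const factory = new ItemFactory(new Timer())
const item = factory.create({
  userUuid: 'a-b-c',
  sessionUuid: null,
  itemHash: { uuid: '1-2-3', content_type: ContentType.Note, created_at_timestamp: 1616164633241312 },
})
// item.createdAtTimestamp === 1616164633241312, while item.updatedAtTimestamp is "now"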
@@ -0,0 +1,9 @@
import { Uuid } from '@standardnotes/common'

import { Item } from './Item'
import { ItemHash } from './ItemHash'

export interface ItemFactoryInterface {
  create(dto: { userUuid: string; itemHash: ItemHash; sessionUuid: Uuid | null }): Item
  createStub(dto: { userUuid: string; itemHash: ItemHash; sessionUuid: Uuid | null }): Item
}
16
packages/syncing-server/src/Domain/Item/ItemHash.ts
Normal file
@@ -0,0 +1,16 @@
import { ContentType } from '@standardnotes/common'

export type ItemHash = {
  uuid: string
  content?: string
  content_type: ContentType
  deleted?: boolean
  duplicate_of?: string | null
  auth_hash?: string
  enc_item_key?: string
  items_key_id?: string
  created_at?: string
  created_at_timestamp?: number
  updated_at?: string
  updated_at_timestamp?: number
}
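ItemHash is the client-side, snake_case shape of an item as it arrives in a sync request; only uuid and content_type are mandatory, and the date fields may arrive either as formatted strings or as microsecond timestamps. An example literal, with values mirroring the fixtures in ItemFactory.spec.ts:

// Illustrative payload only; not part of the package's fixtures.
const hash: ItemHash = {
  uuid: '1-2-3',
  content: 'asdqwe1',
  content_type: ContentType.Note,
  enc_item_key: 'qweqwe1',
  items_key_id: 'asdasd1',
  created_at_timestamp: 1616164633241312,
  updated_at: '2021-03-25T09:37:37.943Z',
}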
12
packages/syncing-server/src/Domain/Item/ItemQuery.ts
Normal file
@@ -0,0 +1,12 @@
export type ItemQuery = {
  userUuid?: string
  sortBy: string
  sortOrder: 'ASC' | 'DESC'
  uuids?: Array<string>
  lastSyncTime?: number
  syncTimeComparison?: '>' | '>='
  contentType?: string
  deleted?: boolean
  offset?: number
  limit?: number
}
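Every repository lookup in this package goes through ItemQuery; only the sort fields are required, and syncTimeComparison distinguishes a fresh sync ('>', strictly newer than the sync token) from a cursor-based continuation ('>='). For example, the query built for an initial sync, with the values asserted in ItemService.spec.ts below:

// Illustrative query object; field values match the spec's assertions.
const query: ItemQuery = {
  userUuid: '1-2-3',
  sortBy: 'updated_at_timestamp',
  sortOrder: 'ASC',
  lastSyncTime: 1616164633241564,
  syncTimeComparison: '>',
  limit: 150,
}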
@@ -0,0 +1,22 @@
import { Item } from './Item'
import { ItemQuery } from './ItemQuery'
import { ReadStream } from 'fs'
import { ExtendedIntegrityPayload } from './ExtendedIntegrityPayload'

export interface ItemRepositoryInterface {
  deleteByUserUuid(userUuid: string): Promise<void>
  findAll(query: ItemQuery): Promise<Item[]>
  streamAll(query: ItemQuery): Promise<ReadStream>
  countAll(query: ItemQuery): Promise<number>
  findContentSizeForComputingTransferLimit(
    query: ItemQuery,
  ): Promise<Array<{ uuid: string; contentSize: number | null }>>
  findDatesForComputingIntegrityHash(userUuid: string): Promise<Array<{ updated_at_timestamp: number }>>
  findItemsForComputingIntegrityPayloads(userUuid: string): Promise<ExtendedIntegrityPayload[]>
  findByUuidAndUserUuid(uuid: string, userUuid: string): Promise<Item | null>
  findByUuid(uuid: string): Promise<Item | null>
  remove(item: Item): Promise<Item>
  save(item: Item): Promise<Item>
  markItemsAsDeleted(itemUuids: Array<string>, updatedAtTimestamp: number): Promise<void>
  updateContentSize(itemUuid: string, contentSize: number): Promise<void>
}
917
packages/syncing-server/src/Domain/Item/ItemService.spec.ts
Normal file
@@ -0,0 +1,917 @@
import 'reflect-metadata'

import { ContentType } from '@standardnotes/common'
import { Item } from './Item'
import { ItemHash } from './ItemHash'

import { ItemRepositoryInterface } from './ItemRepositoryInterface'
import { ItemService } from './ItemService'
import { ApiVersion } from '../Api/ApiVersion'
import { RevisionServiceInterface } from '../Revision/RevisionServiceInterface'
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { Logger } from 'winston'
import { Timer, TimerInterface } from '@standardnotes/time'
import { ItemSaveValidatorInterface } from './SaveValidator/ItemSaveValidatorInterface'
import { ItemFactoryInterface } from './ItemFactoryInterface'
import { ItemConflict } from './ItemConflict'
import { ItemTransferCalculatorInterface } from './ItemTransferCalculatorInterface'

describe('ItemService', () => {
  let itemRepository: ItemRepositoryInterface
  let revisionService: RevisionServiceInterface
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  const revisionFrequency = 300
  const contentSizeTransferLimit = 100
  let timer: TimerInterface
  let item1: Item
  let item2: Item
  let itemHash1: ItemHash
  let itemHash2: ItemHash
  let emptyHash: ItemHash
  let syncToken: string
  let logger: Logger
  let itemSaveValidator: ItemSaveValidatorInterface
  let newItem: Item
  let itemFactory: ItemFactoryInterface
  let timeHelper: Timer
  let itemTransferCalculator: ItemTransferCalculatorInterface

  const createService = () =>
    new ItemService(
      itemSaveValidator,
      itemFactory,
      itemRepository,
      revisionService,
      domainEventPublisher,
      domainEventFactory,
      revisionFrequency,
      contentSizeTransferLimit,
      itemTransferCalculator,
      timer,
      logger,
    )

  beforeEach(() => {
    timeHelper = new Timer()

    item1 = {
      uuid: '1-2-3',
      userUuid: '1-2-3',
      createdAt: new Date(1616164633241311),
      createdAtTimestamp: 1616164633241311,
      updatedAt: new Date(1616164633241311),
      updatedAtTimestamp: 1616164633241311,
    } as jest.Mocked<Item>
    item2 = {
      uuid: '2-3-4',
      userUuid: '1-2-3',
      createdAt: new Date(1616164633241312),
      createdAtTimestamp: 1616164633241312,
      updatedAt: new Date(1616164633241312),
      updatedAtTimestamp: 1616164633241312,
    } as jest.Mocked<Item>

    itemHash1 = {
      uuid: '1-2-3',
      content: 'asdqwe1',
      content_type: ContentType.Note,
      duplicate_of: null,
      enc_item_key: 'qweqwe1',
      items_key_id: 'asdasd1',
      created_at: timeHelper.formatDate(
        timeHelper.convertMicrosecondsToDate(item1.createdAtTimestamp),
        'YYYY-MM-DDTHH:mm:ss.SSS[Z]',
      ),
      updated_at: timeHelper.formatDate(
        new Date(timeHelper.convertMicrosecondsToMilliseconds(item1.updatedAtTimestamp) + 1),
        'YYYY-MM-DDTHH:mm:ss.SSS[Z]',
      ),
    } as jest.Mocked<ItemHash>

    itemHash2 = {
      uuid: '2-3-4',
      content: 'asdqwe2',
      content_type: ContentType.Note,
      duplicate_of: null,
      enc_item_key: 'qweqwe2',
      items_key_id: 'asdasd2',
      created_at: timeHelper.formatDate(
        timeHelper.convertMicrosecondsToDate(item2.createdAtTimestamp),
        'YYYY-MM-DDTHH:mm:ss.SSS[Z]',
      ),
      updated_at: timeHelper.formatDate(
        new Date(timeHelper.convertMicrosecondsToMilliseconds(item2.updatedAtTimestamp) + 1),
        'YYYY-MM-DDTHH:mm:ss.SSS[Z]',
      ),
    } as jest.Mocked<ItemHash>

    emptyHash = {
      uuid: '2-3-4',
    } as jest.Mocked<ItemHash>

    itemTransferCalculator = {} as jest.Mocked<ItemTransferCalculatorInterface>
    itemTransferCalculator.computeItemUuidsToFetch = jest.fn().mockReturnValue([item1.uuid, item2.uuid])

    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findAll = jest.fn().mockReturnValue([item1, item2])
    itemRepository.countAll = jest.fn().mockReturnValue(2)
    itemRepository.save = jest.fn().mockImplementation((item: Item) => item)

    revisionService = {} as jest.Mocked<RevisionServiceInterface>
    revisionService.createRevision = jest.fn()

    timer = {} as jest.Mocked<TimerInterface>
    timer.getTimestampInMicroseconds = jest.fn().mockReturnValue(1616164633241568)
    timer.getUTCDate = jest.fn().mockReturnValue(new Date())
    timer.convertStringDateToDate = jest
      .fn()
      .mockImplementation((date: string) => timeHelper.convertStringDateToDate(date))
    timer.convertMicrosecondsToSeconds = jest.fn().mockReturnValue(600)
    timer.convertStringDateToMicroseconds = jest
      .fn()
      .mockImplementation((date: string) => timeHelper.convertStringDateToMicroseconds(date))
    timer.convertMicrosecondsToDate = jest
      .fn()
      .mockImplementation((microseconds: number) => timeHelper.convertMicrosecondsToDate(microseconds))

    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()

    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createDuplicateItemSyncedEvent = jest.fn()

    logger = {} as jest.Mocked<Logger>
    logger.error = jest.fn()
    logger.warn = jest.fn()

    syncToken = Buffer.from('2:1616164633.241564', 'utf-8').toString('base64')

    itemSaveValidator = {} as jest.Mocked<ItemSaveValidatorInterface>
    itemSaveValidator.validate = jest.fn().mockReturnValue({ passed: true })

    newItem = {} as jest.Mocked<Item>

    itemFactory = {} as jest.Mocked<ItemFactoryInterface>
    itemFactory.create = jest.fn().mockReturnValue(newItem)
    itemFactory.createStub = jest.fn().mockReturnValue(newItem)
  })

  it('should retrieve all items for a user from last sync with sync token version 1', async () => {
    syncToken = Buffer.from('1:2021-03-15 07:00:00', 'utf-8').toString('base64')

    expect(
      await createService().getItems({
        userUuid: '1-2-3',
        syncToken,
        contentType: ContentType.Note,
      }),
    ).toEqual({
      items: [item1, item2],
    })

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1615791600000000,
      syncTimeComparison: '>',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 150,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortOrder: 'ASC',
      sortBy: 'updated_at_timestamp',
    })
  })

  it('should retrieve all items for a user from last sync', async () => {
    expect(
      await createService().getItems({
        userUuid: '1-2-3',
        syncToken,
        contentType: ContentType.Note,
      }),
    ).toEqual({
      items: [item1, item2],
    })

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1616164633241564,
      syncTimeComparison: '>',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 150,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    })
  })

  it('should retrieve no items for a user if there are none from last sync', async () => {
    itemTransferCalculator.computeItemUuidsToFetch = jest.fn().mockReturnValue([])

    expect(
      await createService().getItems({
        userUuid: '1-2-3',
        syncToken,
        contentType: ContentType.Note,
      }),
    ).toEqual({
      items: [],
    })

    expect(itemRepository.findAll).not.toHaveBeenCalled()
    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1616164633241564,
      syncTimeComparison: '>',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 150,
    })
  })

  it('should return a cursor token if there are more items than requested with limit', async () => {
    itemRepository.findAll = jest.fn().mockReturnValue([item1])

    const itemsResponse = await createService().getItems({
      userUuid: '1-2-3',
      syncToken,
      limit: 1,
      contentType: ContentType.Note,
    })

    expect(itemsResponse).toEqual({
      cursorToken: 'MjoxNjE2MTY0NjMzLjI0MTMxMQ==',
      items: [item1],
    })

    expect(Buffer.from(<string>itemsResponse.cursorToken, 'base64').toString('utf-8')).toEqual('2:1616164633.241311')

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1616164633241564,
      syncTimeComparison: '>',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 1,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    })
  })

  it('should retrieve all items for a user from cursor token', async () => {
    const cursorToken = Buffer.from('2:1616164633.241123', 'utf-8').toString('base64')

    expect(
      await createService().getItems({
        userUuid: '1-2-3',
        syncToken,
        cursorToken,
        contentType: ContentType.Note,
      }),
    ).toEqual({
      items: [item1, item2],
    })

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1616164633241123,
      syncTimeComparison: '>=',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 150,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    })
  })

  it('should retrieve all undeleted items for a user without cursor or sync token', async () => {
    expect(
      await createService().getItems({
        userUuid: '1-2-3',
        contentType: ContentType.Note,
      }),
    ).toEqual({
      items: [item1, item2],
    })

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      deleted: false,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      syncTimeComparison: '>',
      userUuid: '1-2-3',
      limit: 150,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    })
  })

  it('should retrieve all items with default limit if not defined', async () => {
    await createService().getItems({
      userUuid: '1-2-3',
      syncToken,
      contentType: ContentType.Note,
    })

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1616164633241564,
      syncTimeComparison: '>',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 150,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortOrder: 'ASC',
      sortBy: 'updated_at_timestamp',
    })
  })

  it('should retrieve all items with default limit if the defined limit is non-positive', async () => {
    await createService().getItems({
      userUuid: '1-2-3',
      syncToken,
      limit: 0,
      contentType: ContentType.Note,
    })

    expect(itemRepository.countAll).toHaveBeenCalledWith({
      contentType: 'Note',
      lastSyncTime: 1616164633241564,
      syncTimeComparison: '>',
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      userUuid: '1-2-3',
      limit: 150,
    })
    expect(itemRepository.findAll).toHaveBeenCalledWith({
      uuids: ['1-2-3', '2-3-4'],
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    })
  })

  it('should throw an error if the sync token is missing time', async () => {
    let error = null

    try {
      await createService().getItems({
        userUuid: '1-2-3',
        syncToken: '2:',
        limit: 0,
        contentType: ContentType.Note,
      })
    } catch (e) {
      error = e
    }

    expect(error).not.toBeNull()
  })

  it('should throw an error if the sync token is missing version', async () => {
    let error = null

    try {
      await createService().getItems({
        userUuid: '1-2-3',
        syncToken: '1234567890',
        limit: 0,
        contentType: ContentType.Note,
      })
    } catch (e) {
      error = e
    }

    expect(error).not.toBeNull()
  })

  it('should front load keys items to top of the collection for better client performance', async () => {
    const item3 = {
      uuid: '1-2-3',
    } as jest.Mocked<Item>
    const item4 = {
      uuid: '4-5-6',
    } as jest.Mocked<Item>

    itemRepository.findAll = jest.fn().mockReturnValue([item3, item4])

    await createService().frontLoadKeysItemsToTop('1-2-3', [item1, item2])
  })

  it('should save new items', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(null)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [newItem],
      syncToken: 'MjpOYU4=',
    })

    expect(revisionService.createRevision).toHaveBeenCalledTimes(1)
  })

  it('should not save new items in read only access mode', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(null)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: true,
      sessionUuid: null,
    })

    expect(result).toEqual({
      conflicts: [
        {
          type: 'readonly_error',
          unsavedItem: itemHash1,
        },
      ],
      savedItems: [],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })

    expect(revisionService.createRevision).toHaveBeenCalledTimes(0)
  })

  it('should save new items that are duplicates', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(null)
    const duplicateItem = { updatedAtTimestamp: 1616164633241570, duplicateOf: '1-2-3' } as jest.Mocked<Item>
    itemFactory.create = jest.fn().mockReturnValueOnce(duplicateItem)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [duplicateItem],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU3MQ==',
    })

    expect(revisionService.createRevision).toHaveBeenCalledTimes(1)
    expect(domainEventPublisher.publish).toHaveBeenCalledTimes(1)
    expect(domainEventFactory.createDuplicateItemSyncedEvent).toHaveBeenCalledTimes(1)
  })

  it('should skip items that are conflicting on validation', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(null)

    const conflict = {} as jest.Mocked<ItemConflict>
    const validationResult = { passed: false, conflict }
    itemSaveValidator.validate = jest.fn().mockReturnValue(validationResult)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [conflict],
      savedItems: [],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should mark items as saved that are skipped on validation', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(null)

    const skipped = {} as jest.Mocked<Item>
    const validationResult = { passed: false, skipped }
    itemSaveValidator.validate = jest.fn().mockReturnValue(validationResult)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [skipped],
      syncToken: 'MjpOYU4=',
    })
  })

  it('should calculate the sync token based on last updated date of saved items incremented with 1 microsecond to avoid returning same object in subsequent sync', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(null)

    const itemHash3 = {
      uuid: '3-4-5',
      content: 'asdqwe3',
      content_type: ContentType.Note,
      duplicate_of: null,
      enc_item_key: 'qweqwe3',
      items_key_id: 'asdasd3',
      created_at: '2021-02-19T11:35:45.652Z',
      updated_at: '2021-03-25T09:37:37.943Z',
    } as jest.Mocked<ItemHash>

    const saveProcedureStartTimestamp = 1616164633241580
    const item1Timestamp = 1616164633241570
    const item2Timestamp = 1616164633241568
    const item3Timestamp = 1616164633241569
    timer.getTimestampInMicroseconds = jest.fn().mockReturnValueOnce(saveProcedureStartTimestamp)

    itemFactory.create = jest
      .fn()
      .mockReturnValueOnce({ updatedAtTimestamp: item1Timestamp, duplicateOf: null } as jest.Mocked<Item>)
      .mockReturnValueOnce({ updatedAtTimestamp: item2Timestamp, duplicateOf: null } as jest.Mocked<Item>)
      .mockReturnValueOnce({ updatedAtTimestamp: item3Timestamp, duplicateOf: null } as jest.Mocked<Item>)

    const result = await createService().saveItems({
      itemHashes: [itemHash1, itemHash3, itemHash2],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result.syncToken).toEqual('MjoxNjE2MTY0NjMzLjI0MTU3MQ==')
    expect(Buffer.from(result.syncToken, 'base64').toString('utf-8')).toEqual('2:1616164633.241571')
  })

  it('should update existing items', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          content: 'asdqwe1',
          contentSize: 7,
          contentType: 'Note',
          createdAtTimestamp: expect.any(Number),
          createdAt: expect.any(Date),
          encItemKey: 'qweqwe1',
          itemsKeyId: 'asdasd1',
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '1-2-3',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should update existing items from legacy clients', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    delete itemHash1.updated_at
    delete itemHash1.updated_at_timestamp

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20161215,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          content: 'asdqwe1',
          contentSize: 7,
          contentType: 'Note',
          createdAtTimestamp: expect.any(Number),
          createdAt: expect.any(Date),
          encItemKey: 'qweqwe1',
          itemsKeyId: 'asdasd1',
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '1-2-3',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should update existing items with created_at_timestamp', async () => {
    itemHash1.created_at_timestamp = 123
    itemHash1.updated_at_timestamp = item1.updatedAtTimestamp
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          content: 'asdqwe1',
          contentSize: 7,
          contentType: 'Note',
          createdAtTimestamp: 123,
          createdAt: expect.any(Date),
          encItemKey: 'qweqwe1',
          itemsKeyId: 'asdasd1',
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '1-2-3',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should update existing empty hashes', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(item2)
    emptyHash.updated_at = timeHelper.formatDate(
      new Date(timeHelper.convertMicrosecondsToMilliseconds(item2.updatedAtTimestamp) + 1),
      'YYYY-MM-DDTHH:mm:ss.SSS[Z]',
    )

    const result = await createService().saveItems({
      itemHashes: [emptyHash],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          contentSize: 0,
          createdAtTimestamp: expect.any(Number),
          createdAt: expect.any(Date),
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '2-3-4',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should create a revision for existing item if revisions frequency is matched', async () => {
    timer.convertMicrosecondsToSeconds = jest.fn().mockReturnValue(item1)
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          content: 'asdqwe1',
          contentSize: 7,
          contentType: 'Note',
          createdAtTimestamp: expect.any(Number),
          createdAt: expect.any(Date),
          encItemKey: 'qweqwe1',
          itemsKeyId: 'asdasd1',
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '1-2-3',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should update existing items with empty user-agent', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          content: 'asdqwe1',
          contentSize: 7,
          contentType: 'Note',
          createdAtTimestamp: expect.any(Number),
          createdAt: expect.any(Date),
          encItemKey: 'qweqwe1',
          itemsKeyId: 'asdasd1',
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '1-2-3',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should update existing items with auth hash', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    itemHash1.auth_hash = 'test'

    const result = await createService().saveItems({
      itemHashes: [itemHash1],
      userUuid: '1-2-3',
      apiVersion: ApiVersion.v20200115,
      readOnlyAccess: false,
      sessionUuid: '2-3-4',
    })

    expect(result).toEqual({
      conflicts: [],
      savedItems: [
        {
          content: 'asdqwe1',
          contentSize: 7,
          contentType: 'Note',
          createdAtTimestamp: expect.any(Number),
          createdAt: expect.any(Date),
          encItemKey: 'qweqwe1',
          itemsKeyId: 'asdasd1',
          authHash: 'test',
          userUuid: '1-2-3',
          updatedAtTimestamp: expect.any(Number),
          updatedAt: expect.any(Date),
          updatedWithSession: '2-3-4',
          uuid: '1-2-3',
        },
      ],
      syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
    })
  })

  it('should mark existing item as deleted', async () => {
    itemRepository.findByUuid = jest.fn().mockReturnValue(item1)

    itemHash1.deleted = true
|
||||
const result = await createService().saveItems({
|
||||
itemHashes: [itemHash1],
|
||||
userUuid: '1-2-3',
|
||||
apiVersion: ApiVersion.v20200115,
|
||||
readOnlyAccess: false,
|
||||
sessionUuid: '2-3-4',
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
conflicts: [],
|
||||
savedItems: [
|
||||
{
|
||||
content: null,
|
||||
contentSize: 0,
|
||||
authHash: null,
|
||||
contentType: 'Note',
|
||||
createdAtTimestamp: expect.any(Number),
|
||||
createdAt: expect.any(Date),
|
||||
encItemKey: null,
|
||||
deleted: true,
|
||||
itemsKeyId: null,
|
||||
userUuid: '1-2-3',
|
||||
updatedAtTimestamp: expect.any(Number),
|
||||
updatedAt: expect.any(Date),
|
||||
updatedWithSession: '2-3-4',
|
||||
uuid: '1-2-3',
|
||||
},
|
||||
],
|
||||
syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
|
||||
})
|
||||
})
|
||||
|
||||
it('should mark existing item as duplicate', async () => {
|
||||
itemRepository.findByUuid = jest.fn().mockReturnValue(item1)
|
||||
|
||||
itemHash1.duplicate_of = '1-2-3'
|
||||
const result = await createService().saveItems({
|
||||
itemHashes: [itemHash1],
|
||||
userUuid: '1-2-3',
|
||||
apiVersion: ApiVersion.v20200115,
|
||||
readOnlyAccess: false,
|
||||
sessionUuid: '2-3-4',
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
conflicts: [],
|
||||
savedItems: [
|
||||
{
|
||||
content: 'asdqwe1',
|
||||
contentSize: 7,
|
||||
contentType: 'Note',
|
||||
createdAtTimestamp: expect.any(Number),
|
||||
createdAt: expect.any(Date),
|
||||
encItemKey: 'qweqwe1',
|
||||
duplicateOf: '1-2-3',
|
||||
itemsKeyId: 'asdasd1',
|
||||
userUuid: '1-2-3',
|
||||
updatedAtTimestamp: expect.any(Number),
|
||||
updatedAt: expect.any(Date),
|
||||
updatedWithSession: '2-3-4',
|
||||
uuid: '1-2-3',
|
||||
},
|
||||
],
|
||||
syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
|
||||
})
|
||||
expect(domainEventPublisher.publish).toHaveBeenCalledTimes(1)
|
||||
expect(domainEventFactory.createDuplicateItemSyncedEvent).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should skip saving conflicting items and mark them as sync conflicts when saving to database fails', async () => {
|
||||
itemRepository.findByUuid = jest.fn().mockReturnValue(null)
|
||||
itemRepository.save = jest.fn().mockImplementation(() => {
|
||||
throw new Error('Something bad happened')
|
||||
})
|
||||
|
||||
const result = await createService().saveItems({
|
||||
itemHashes: [itemHash1, itemHash2],
|
||||
userUuid: '1-2-3',
|
||||
apiVersion: ApiVersion.v20200115,
|
||||
readOnlyAccess: false,
|
||||
sessionUuid: '2-3-4',
|
||||
})
|
||||
|
||||
expect(result).toEqual({
|
||||
conflicts: [
|
||||
{
|
||||
type: 'uuid_conflict',
|
||||
unsavedItem: itemHash1,
|
||||
},
|
||||
{
|
||||
type: 'uuid_conflict',
|
||||
unsavedItem: itemHash2,
|
||||
},
|
||||
],
|
||||
savedItems: [],
|
||||
syncToken: 'MjoxNjE2MTY0NjMzLjI0MTU2OQ==',
|
||||
})
|
||||
})
|
||||
})
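
Note: the syncToken literal asserted throughout these tests is just base64 of "<version>:<seconds>". A minimal decoding sketch, illustrative only:

// Mirrors the encoding used by ItemService.calculateSyncToken (see the file below).
const token = 'MjoxNjE2MTY0NjMzLjI0MTU2OQ=='
console.log(Buffer.from(token, 'base64').toString('utf-8'))
// -> "2:1616164633.241569": token version 2, then seconds with microsecond precision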
301
packages/syncing-server/src/Domain/Item/ItemService.ts
Normal file
@@ -0,0 +1,301 @@
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { Time, TimerInterface } from '@standardnotes/time'
import { ContentType, Uuid } from '@standardnotes/common'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'

import TYPES from '../../Bootstrap/Types'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { RevisionServiceInterface } from '../Revision/RevisionServiceInterface'
import { GetItemsDTO } from './GetItemsDTO'
import { GetItemsResult } from './GetItemsResult'
import { Item } from './Item'
import { ItemConflict } from './ItemConflict'
import { ItemFactoryInterface } from './ItemFactoryInterface'
import { ItemHash } from './ItemHash'
import { ItemQuery } from './ItemQuery'
import { ItemRepositoryInterface } from './ItemRepositoryInterface'
import { ItemServiceInterface } from './ItemServiceInterface'
import { SaveItemsDTO } from './SaveItemsDTO'
import { SaveItemsResult } from './SaveItemsResult'
import { ItemSaveValidatorInterface } from './SaveValidator/ItemSaveValidatorInterface'
import { ConflictType } from '@standardnotes/responses'
import { ItemTransferCalculatorInterface } from './ItemTransferCalculatorInterface'

@injectable()
export class ItemService implements ItemServiceInterface {
  private readonly DEFAULT_ITEMS_LIMIT = 150
  private readonly SYNC_TOKEN_VERSION = 2

  constructor(
    @inject(TYPES.ItemSaveValidator) private itemSaveValidator: ItemSaveValidatorInterface,
    @inject(TYPES.ItemFactory) private itemFactory: ItemFactoryInterface,
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.RevisionService) private revisionService: RevisionServiceInterface,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
    @inject(TYPES.REVISIONS_FREQUENCY) private revisionFrequency: number,
    @inject(TYPES.CONTENT_SIZE_TRANSFER_LIMIT) private contentSizeTransferLimit: number,
    @inject(TYPES.ItemTransferCalculator) private itemTransferCalculator: ItemTransferCalculatorInterface,
    @inject(TYPES.Timer) private timer: TimerInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async getItems(dto: GetItemsDTO): Promise<GetItemsResult> {
    const lastSyncTime = this.getLastSyncTime(dto)
    const syncTimeComparison = dto.cursorToken ? '>=' : '>'
    const limit = dto.limit === undefined || dto.limit < 1 ? this.DEFAULT_ITEMS_LIMIT : dto.limit

    const itemQuery: ItemQuery = {
      userUuid: dto.userUuid,
      lastSyncTime,
      syncTimeComparison,
      contentType: dto.contentType,
      deleted: lastSyncTime ? undefined : false,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
      limit,
    }

    const itemUuidsToFetch = await this.itemTransferCalculator.computeItemUuidsToFetch(
      itemQuery,
      this.contentSizeTransferLimit,
    )
    let items: Array<Item> = []
    if (itemUuidsToFetch.length > 0) {
      items = await this.itemRepository.findAll({
        uuids: itemUuidsToFetch,
        sortBy: 'updated_at_timestamp',
        sortOrder: 'ASC',
      })
    }
    const totalItemsCount = await this.itemRepository.countAll(itemQuery)

    let cursorToken = undefined
    if (totalItemsCount > limit) {
      const lastSyncTime = items[items.length - 1].updatedAtTimestamp / Time.MicrosecondsInASecond
      cursorToken = Buffer.from(`${this.SYNC_TOKEN_VERSION}:${lastSyncTime}`, 'utf-8').toString('base64')
    }

    return {
      items,
      cursorToken,
    }
  }

  async saveItems(dto: SaveItemsDTO): Promise<SaveItemsResult> {
    const savedItems: Array<Item> = []
    const conflicts: Array<ItemConflict> = []

    const lastUpdatedTimestamp = this.timer.getTimestampInMicroseconds()

    for (const itemHash of dto.itemHashes) {
      if (dto.readOnlyAccess) {
        conflicts.push({
          unsavedItem: itemHash,
          type: ConflictType.ReadOnlyError,
        })

        continue
      }

      const existingItem = await this.itemRepository.findByUuid(itemHash.uuid)
      const processingResult = await this.itemSaveValidator.validate({
        userUuid: dto.userUuid,
        apiVersion: dto.apiVersion,
        itemHash,
        existingItem,
      })
      if (!processingResult.passed) {
        if (processingResult.conflict) {
          conflicts.push(processingResult.conflict)
        }
        if (processingResult.skipped) {
          savedItems.push(processingResult.skipped)
        }

        continue
      }

      if (existingItem) {
        const updatedItem = await this.updateExistingItem({
          existingItem,
          itemHash,
          sessionUuid: dto.sessionUuid,
        })
        savedItems.push(updatedItem)
      } else {
        try {
          const newItem = await this.saveNewItem({ userUuid: dto.userUuid, itemHash, sessionUuid: dto.sessionUuid })
          savedItems.push(newItem)
        } catch (error) {
          this.logger.error(`[${dto.userUuid}] Saving item ${itemHash.uuid} failed. Error: ${(error as Error).message}`)

          conflicts.push({
            unsavedItem: itemHash,
            type: ConflictType.UuidConflict,
          })

          continue
        }
      }
    }

    const syncToken = this.calculateSyncToken(lastUpdatedTimestamp, savedItems)

    return {
      savedItems,
      conflicts,
      syncToken,
    }
  }

  async frontLoadKeysItemsToTop(userUuid: string, retrievedItems: Array<Item>): Promise<Array<Item>> {
    const itemsKeys = await this.itemRepository.findAll({
      userUuid,
      contentType: ContentType.ItemsKey,
      sortBy: 'updated_at_timestamp',
      sortOrder: 'ASC',
    })

    const retrievedItemsIds: Array<string> = retrievedItems.map((item: Item) => item.uuid)

    itemsKeys.forEach((itemKey: Item) => {
      if (retrievedItemsIds.indexOf(itemKey.uuid) === -1) {
        retrievedItems.unshift(itemKey)
      }
    })

    return retrievedItems
  }

  private calculateSyncToken(lastUpdatedTimestamp: number, savedItems: Array<Item>): string {
    if (savedItems.length) {
      const sortedItems = savedItems.sort((itemA: Item, itemB: Item) => {
        return itemA.updatedAtTimestamp > itemB.updatedAtTimestamp ? 1 : -1
      })
      lastUpdatedTimestamp = sortedItems[sortedItems.length - 1].updatedAtTimestamp
    }

    const lastUpdatedTimestampWithMicrosecondPreventingSyncDoubles = lastUpdatedTimestamp + 1

    return Buffer.from(
      `${this.SYNC_TOKEN_VERSION}:${
        lastUpdatedTimestampWithMicrosecondPreventingSyncDoubles / Time.MicrosecondsInASecond
      }`,
      'utf-8',
    ).toString('base64')
  }

  private async updateExistingItem(dto: {
    existingItem: Item
    itemHash: ItemHash
    sessionUuid: Uuid | null
  }): Promise<Item> {
    dto.existingItem.updatedWithSession = dto.sessionUuid
    dto.existingItem.contentSize = 0
    if (dto.itemHash.content) {
      dto.existingItem.content = dto.itemHash.content
      dto.existingItem.contentSize = Buffer.byteLength(dto.itemHash.content)
    }
    if (dto.itemHash.content_type) {
      dto.existingItem.contentType = dto.itemHash.content_type
    }
    if (dto.itemHash.deleted !== undefined) {
      dto.existingItem.deleted = dto.itemHash.deleted
    }
    let wasMarkedAsDuplicate = false
    if (dto.itemHash.duplicate_of) {
      wasMarkedAsDuplicate = !dto.existingItem.duplicateOf
      dto.existingItem.duplicateOf = dto.itemHash.duplicate_of
    }
    if (dto.itemHash.auth_hash) {
      dto.existingItem.authHash = dto.itemHash.auth_hash
    }
    if (dto.itemHash.enc_item_key) {
      dto.existingItem.encItemKey = dto.itemHash.enc_item_key
    }
    if (dto.itemHash.items_key_id) {
      dto.existingItem.itemsKeyId = dto.itemHash.items_key_id
    }

    if (dto.itemHash.deleted === true) {
      dto.existingItem.deleted = true
      dto.existingItem.content = null
      dto.existingItem.contentSize = 0
      dto.existingItem.encItemKey = null
      dto.existingItem.authHash = null
      dto.existingItem.itemsKeyId = null
    }

    const updatedAt = this.timer.getTimestampInMicroseconds()
    const secondsFromLastUpdate = this.timer.convertMicrosecondsToSeconds(
      updatedAt - dto.existingItem.updatedAtTimestamp,
    )

    if (dto.itemHash.created_at_timestamp) {
      dto.existingItem.createdAtTimestamp = dto.itemHash.created_at_timestamp
      dto.existingItem.createdAt = this.timer.convertMicrosecondsToDate(dto.itemHash.created_at_timestamp)
    } else if (dto.itemHash.created_at) {
      dto.existingItem.createdAtTimestamp = this.timer.convertStringDateToMicroseconds(dto.itemHash.created_at)
      dto.existingItem.createdAt = this.timer.convertStringDateToDate(dto.itemHash.created_at)
    }

    dto.existingItem.updatedAtTimestamp = updatedAt
    dto.existingItem.updatedAt = this.timer.convertMicrosecondsToDate(updatedAt)

    const savedItem = await this.itemRepository.save(dto.existingItem)

    if (secondsFromLastUpdate >= this.revisionFrequency) {
      await this.revisionService.createRevision(savedItem)
    }

    if (wasMarkedAsDuplicate) {
      await this.domainEventPublisher.publish(
        this.domainEventFactory.createDuplicateItemSyncedEvent(savedItem.uuid, savedItem.userUuid),
      )
    }

    return savedItem
  }

  private async saveNewItem(dto: { userUuid: string; itemHash: ItemHash; sessionUuid: Uuid | null }): Promise<Item> {
    const newItem = this.itemFactory.create(dto)

    const savedItem = await this.itemRepository.save(newItem)

    await this.revisionService.createRevision(savedItem)

    if (savedItem.duplicateOf) {
      await this.domainEventPublisher.publish(
        this.domainEventFactory.createDuplicateItemSyncedEvent(savedItem.uuid, savedItem.userUuid),
      )
    }

    return savedItem
  }

  private getLastSyncTime(dto: GetItemsDTO): number | undefined {
    let token = dto.syncToken
    if (dto.cursorToken !== undefined && dto.cursorToken !== null) {
      token = dto.cursorToken
    }

    if (!token) {
      return undefined
    }

    const decodedToken = Buffer.from(token, 'base64').toString('utf-8')

    const tokenParts = decodedToken.split(':')
    const version = tokenParts.shift()

    switch (version) {
      case '1':
        return this.timer.convertStringDateToMicroseconds(tokenParts.join(':'))
      case '2':
        return +tokenParts[0] * Time.MicrosecondsInASecond
      default:
        throw Error('Sync token is missing version part')
    }
  }
}
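
Note: getLastSyncTime above accepts two token versions; a minimal sketch constructing each, with illustrative values:

// Version 2 carries seconds since epoch; version 1 carries a string date (legacy clients).
const v2Token = Buffer.from('2:1616164633.241569', 'utf-8').toString('base64')
const v1Token = Buffer.from('1:2021-03-19T14:37:13.241569Z', 'utf-8').toString('base64')
// The version-2 token decodes to 1616164633.241569 * 1_000_000 microseconds; a token
// without a known version part throws 'Sync token is missing version part'.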
@@ -0,0 +1,11 @@
import { GetItemsDTO } from './GetItemsDTO'
import { GetItemsResult } from './GetItemsResult'
import { Item } from './Item'
import { SaveItemsDTO } from './SaveItemsDTO'
import { SaveItemsResult } from './SaveItemsResult'

export interface ItemServiceInterface {
  getItems(dto: GetItemsDTO): Promise<GetItemsResult>
  saveItems(dto: SaveItemsDTO): Promise<SaveItemsResult>
  frontLoadKeysItemsToTop(userUuid: string, retrievedItems: Array<Item>): Promise<Array<Item>>
}
@@ -0,0 +1,201 @@
import 'reflect-metadata'

import { Logger } from 'winston'
import { ItemQuery } from './ItemQuery'

import { ItemRepositoryInterface } from './ItemRepositoryInterface'

import { ItemTransferCalculator } from './ItemTransferCalculator'

describe('ItemTransferCalculator', () => {
  let itemRepository: ItemRepositoryInterface
  let logger: Logger

  const createCalculator = () => new ItemTransferCalculator(itemRepository, logger)

  beforeEach(() => {
    itemRepository = {} as jest.Mocked<ItemRepositoryInterface>
    itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([])

    logger = {} as jest.Mocked<Logger>
    logger.warn = jest.fn()
  })

  describe('fetching uuids', () => {
    it('should compute uuids to fetch based on transfer limit - one item overlapping limit', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 20,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
          contentSize: 20,
        },
      ])

      const result = await createCalculator().computeItemUuidsToFetch(query, 50)

      expect(result).toEqual(['1-2-3', '2-3-4', '3-4-5'])
    })

    it('should compute uuids to fetch based on transfer limit - exact limit fit', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 20,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
          contentSize: 20,
        },
      ])

      const result = await createCalculator().computeItemUuidsToFetch(query, 40)

      expect(result).toEqual(['1-2-3', '2-3-4'])
    })

    it('should compute uuids to fetch based on transfer limit - content size not defined on an item', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 20,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
        },
      ])

      const result = await createCalculator().computeItemUuidsToFetch(query, 50)

      expect(result).toEqual(['1-2-3', '2-3-4', '3-4-5'])
    })

    it('should compute uuids to fetch based on transfer limit - first item over the limit', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 50,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
          contentSize: 20,
        },
      ])

      const result = await createCalculator().computeItemUuidsToFetch(query, 40)

      expect(result).toEqual(['1-2-3', '2-3-4'])
    })
  })

  describe('fetching bundles', () => {
    it('should compute uuid bundles to fetch based on transfer limit - one item overlapping limit', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 20,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
          contentSize: 20,
        },
      ])

      const result = await createCalculator().computeItemUuidBundlesToFetch(query, 50)

      expect(result).toEqual([['1-2-3', '2-3-4', '3-4-5']])
    })

    it('should compute uuid bundles to fetch based on transfer limit - exact limit fit', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 20,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
          contentSize: 20,
        },
      ])

      const result = await createCalculator().computeItemUuidBundlesToFetch(query, 40)

      expect(result).toEqual([['1-2-3', '2-3-4'], ['3-4-5']])
    })

    it('should compute uuid bundles to fetch based on transfer limit - content size not defined on an item', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 20,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
        },
      ])

      const result = await createCalculator().computeItemUuidBundlesToFetch(query, 50)

      expect(result).toEqual([['1-2-3', '2-3-4', '3-4-5']])
    })

    it('should compute uuid bundles to fetch based on transfer limit - first item over the limit', async () => {
      const query = {} as jest.Mocked<ItemQuery>
      itemRepository.findContentSizeForComputingTransferLimit = jest.fn().mockReturnValue([
        {
          uuid: '1-2-3',
          contentSize: 50,
        },
        {
          uuid: '2-3-4',
          contentSize: 20,
        },
        {
          uuid: '3-4-5',
          contentSize: 20,
        },
      ])

      const result = await createCalculator().computeItemUuidBundlesToFetch(query, 40)

      expect(result).toEqual([['1-2-3', '2-3-4'], ['3-4-5']])
    })
  })
})
@@ -0,0 +1,93 @@
import { inject, injectable } from 'inversify'
import { Uuid } from '@standardnotes/common'
import { Logger } from 'winston'

import TYPES from '../../Bootstrap/Types'

import { ItemTransferCalculatorInterface } from './ItemTransferCalculatorInterface'
import { ItemQuery } from './ItemQuery'
import { ItemRepositoryInterface } from './ItemRepositoryInterface'

@injectable()
export class ItemTransferCalculator implements ItemTransferCalculatorInterface {
  constructor(
    @inject(TYPES.ItemRepository) private itemRepository: ItemRepositoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  async computeItemUuidsToFetch(itemQuery: ItemQuery, bytesTransferLimit: number): Promise<Array<Uuid>> {
    const itemUuidsToFetch = []
    const itemContentSizes = await this.itemRepository.findContentSizeForComputingTransferLimit(itemQuery)
    let totalContentSizeInBytes = 0
    for (const itemContentSize of itemContentSizes) {
      const contentSize = itemContentSize.contentSize ?? 0

      itemUuidsToFetch.push(itemContentSize.uuid)
      totalContentSizeInBytes += contentSize

      const transferLimitBreached = this.isTransferLimitBreached({
        totalContentSizeInBytes,
        bytesTransferLimit,
        itemUuidsToFetch,
        itemContentSizes,
      })

      if (transferLimitBreached) {
        break
      }
    }

    return itemUuidsToFetch
  }

  async computeItemUuidBundlesToFetch(itemQuery: ItemQuery, bytesTransferLimit: number): Promise<Array<Array<Uuid>>> {
    let itemUuidsToFetch = []
    const itemContentSizes = await this.itemRepository.findContentSizeForComputingTransferLimit(itemQuery)
    let totalContentSizeInBytes = 0
    const bundles = []
    for (const itemContentSize of itemContentSizes) {
      const contentSize = itemContentSize.contentSize ?? 0

      itemUuidsToFetch.push(itemContentSize.uuid)
      totalContentSizeInBytes += contentSize

      const transferLimitBreached = this.isTransferLimitBreached({
        totalContentSizeInBytes,
        bytesTransferLimit,
        itemUuidsToFetch,
        itemContentSizes,
      })

      if (transferLimitBreached) {
        bundles.push(Object.assign([], itemUuidsToFetch))
        totalContentSizeInBytes = 0
        itemUuidsToFetch = []
      }
    }

    if (itemUuidsToFetch.length > 0) {
      bundles.push(itemUuidsToFetch)
    }

    return bundles
  }

  private isTransferLimitBreached(dto: {
    totalContentSizeInBytes: number
    bytesTransferLimit: number
    itemUuidsToFetch: Array<Uuid>
    itemContentSizes: Array<{ uuid: string; contentSize: number | null }>
  }): boolean {
    const transferLimitBreached = dto.totalContentSizeInBytes >= dto.bytesTransferLimit
    const transferLimitBreachedAtFirstItem =
      transferLimitBreached && dto.itemUuidsToFetch.length === 1 && dto.itemContentSizes.length > 1

    if (transferLimitBreachedAtFirstItem) {
      this.logger.warn(
        `Item ${dto.itemUuidsToFetch[0]} is breaching the content size transfer limit: ${dto.bytesTransferLimit}`,
      )
    }

    return transferLimitBreached && !transferLimitBreachedAtFirstItem
  }
}
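
Note: a minimal usage sketch of the bundling behaviour, assuming a stubbed repository and logger; it mirrors the exact-limit case from the spec above:

const stubRepository = {
  findContentSizeForComputingTransferLimit: async () => [
    { uuid: '1-2-3', contentSize: 20 },
    { uuid: '2-3-4', contentSize: 20 },
    { uuid: '3-4-5', contentSize: 20 },
  ],
} as unknown as ItemRepositoryInterface
const stubLogger = { warn: () => undefined } as unknown as Logger

// A bundle closes once the running byte total reaches the limit, then a new one starts:
// logs [['1-2-3', '2-3-4'], ['3-4-5']]
void new ItemTransferCalculator(stubRepository, stubLogger)
  .computeItemUuidBundlesToFetch({} as ItemQuery, 40)
  .then(console.log)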
@@ -0,0 +1,8 @@
import { Uuid } from '@standardnotes/common'

import { ItemQuery } from './ItemQuery'

export interface ItemTransferCalculatorInterface {
  computeItemUuidsToFetch(itemQuery: ItemQuery, bytesTransferLimit: number): Promise<Array<Uuid>>
  computeItemUuidBundlesToFetch(itemQuery: ItemQuery, bytesTransferLimit: number): Promise<Array<Array<Uuid>>>
}
11
packages/syncing-server/src/Domain/Item/SaveItemsDTO.ts
Normal file
@@ -0,0 +1,11 @@
import { Uuid } from '@standardnotes/common'

import { ItemHash } from './ItemHash'

export type SaveItemsDTO = {
  itemHashes: ItemHash[]
  userUuid: string
  apiVersion: string
  readOnlyAccess: boolean
  sessionUuid: Uuid | null
}
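
Note: a minimal sketch of this DTO as the specs above construct it; the values are illustrative:

const dto: SaveItemsDTO = {
  itemHashes: [itemHash1], // hashes as received from the client
  userUuid: '1-2-3',
  apiVersion: ApiVersion.v20200115,
  readOnlyAccess: false, // when true, every hash comes back as a ReadOnlyError conflict
  sessionUuid: '2-3-4',
}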
@@ -0,0 +1,8 @@
import { Item } from './Item'
import { ItemConflict } from './ItemConflict'

export type SaveItemsResult = {
  savedItems: Array<Item>
  conflicts: Array<ItemConflict>
  syncToken: string
}
@@ -0,0 +1,63 @@
import 'reflect-metadata'

import { ContentType } from '@standardnotes/common'

import { ApiVersion } from '../../Api/ApiVersion'
import { Item } from '../Item'

import { ContentFilter } from './ContentFilter'

describe('ContentFilter', () => {
  let existingItem: Item
  const createFilter = () => new ContentFilter()

  it('should filter out items with invalid content', async () => {
    const invalidContents = [[], { foo: 'bar' }, [{ foo: 'bar' }], 123, new Date(1)]

    for (const invalidContent of invalidContents) {
      const result = await createFilter().check({
        userUuid: '1-2-3',
        apiVersion: ApiVersion.v20200115,
        itemHash: {
          uuid: '123e4567-e89b-12d3-a456-426655440000',
          content: invalidContent as unknown as string,
          content_type: ContentType.Note,
        },
        existingItem: null,
      })

      expect(result).toEqual({
        passed: false,
        conflict: {
          unsavedItem: {
            uuid: '123e4567-e89b-12d3-a456-426655440000',
            content: invalidContent,
            content_type: ContentType.Note,
          },
          type: 'content_error',
        },
      })
    }
  })

  it('should leave items with valid content', async () => {
    const validContents = ['string', null, undefined]

    for (const validContent of validContents) {
      const result = await createFilter().check({
        userUuid: '1-2-3',
        apiVersion: ApiVersion.v20200115,
        itemHash: {
          uuid: '123e4567-e89b-12d3-a456-426655440000',
          content: validContent as unknown as string,
          content_type: ContentType.Note,
        },
        existingItem,
      })

      expect(result).toEqual({
        passed: true,
      })
    }
  })
})
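
Note: ContentFilter's implementation is not shown in this diff, but the spec above pins down its predicate; a check consistent with it would be the following sketch (an assumption, not the actual source):

// Valid content per the spec: a string, null, or undefined; anything else
// produces a 'content_error' conflict.
const isValidContent = (content: unknown): boolean =>
  content === undefined || content === null || typeof content === 'string'

isValidContent('string') // true  -> { passed: true }
isValidContent([{ foo: 'bar' }]) // false -> conflict of type 'content_error'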
Some files were not shown because too many files have changed in this diff