feat: add files server package

This commit is contained in:
Karol Sójko 2022-06-22 16:44:45 +02:00
parent d61e6f338e
commit 7a8a5fcfdf
No known key found for this signature in database
GPG key ID: A50543BF560BDEB0
108 changed files with 4314 additions and 28 deletions

170
.github/workflows/files.release.dev.yml vendored Normal file
View file

@ -0,0 +1,170 @@
name: Files Server Dev

# Only one dev release pipeline runs at a time; a newer tag push cancels an
# in-flight run so the environment never receives out-of-order deployments.
concurrency:
  group: files_dev_environment
  cancel-in-progress: true

# Triggered by alpha/beta release tags of the files-server package, or manually.
on:
  push:
    tags:
      - '@standardnotes/files-server@[0-9]*.[0-9]*.[0-9]*-alpha.[0-9]*'
      - '@standardnotes/files-server@[0-9]*.[0-9]*.[0-9]*-beta.[0-9]*'
  workflow_dispatch:

jobs:
  # Lint + unit tests gate both image publications.
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # setup-node@v1 is deprecated; v3 is backward compatible for the
      # 'node-version' input used here.
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
      - run: yarn lint:files
      - run: yarn test:files

  # Builds the image and pushes it to ECR tagged with the commit SHA and 'dev'.
  publish-aws-ecr:
    needs: test
    runs-on: ubuntu-latest
    steps:
      # Aligned with the 'test' job (was checkout@v2; v3 for consistency).
      - uses: actions/checkout@v3
      - run: cp .env.sample .env
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v1
      - name: Build, tag, and push image to Amazon ECR
        id: build-image
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
          ECR_REPOSITORY: files
          IMAGE_TAG: ${{ github.sha }}
        run: |
          yarn docker build @standardnotes/files -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
          docker tag $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:dev
          docker push $ECR_REGISTRY/$ECR_REPOSITORY:dev

  # Mirrors the image to Docker Hub under the 'dev' and commit-SHA tags.
  publish-docker-hub:
    needs: test
    runs-on: ubuntu-latest
    steps:
      # Aligned with the 'test' job (was checkout@v2; v3 for consistency).
      - uses: actions/checkout@v3
      - run: cp .env.sample .env
      - name: Publish to Registry
        uses: elgohr/Publish-Docker-Github-Action@master
        with:
          name: standardnotes/files
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
          tags: "dev,${{ github.sha }}"

  # Rolls the ECS web service to the freshly published image.
  deploy-web:
    needs: publish-aws-ecr
    runs-on: ubuntu-latest
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition files-dev --query taskDefinition > task-definition.json
      # Stamps the VERSION env var in the container definition with the commit SHA.
      - name: Fill in the new version in the Amazon ECS task definition
        run: |
          jq '(.containerDefinitions[] | select(.name=="files-dev") | .environment[] | select(.name=="VERSION")).value = "${{ github.sha }}"' task-definition.json > tmp.json && mv tmp.json task-definition.json
      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: files-dev
          image: ${{ secrets.AWS_ECR_REGISTRY }}/files:${{ github.sha }}
      - name: Deploy Amazon ECS task definition
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: files-dev
          cluster: dev
          wait-for-service-stability: true

  # Same rollout as deploy-web, targeting the background-worker service.
  deploy-worker:
    needs: publish-aws-ecr
    runs-on: ubuntu-latest
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Download task definition
        run: |
          aws ecs describe-task-definition --task-definition files-worker-dev --query taskDefinition > task-definition.json
      # Stamps the VERSION env var in the container definition with the commit SHA.
      - name: Fill in the new version in the Amazon ECS task definition
        run: |
          jq '(.containerDefinitions[] | select(.name=="files-worker-dev") | .environment[] | select(.name=="VERSION")).value = "${{ github.sha }}"' task-definition.json > tmp.json && mv tmp.json task-definition.json
      - name: Fill in the new image ID in the Amazon ECS task definition
        id: task-def
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: task-definition.json
          container-name: files-worker-dev
          image: ${{ secrets.AWS_ECR_REGISTRY }}/files:${{ github.sha }}
      - name: Deploy Amazon ECS task definition
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.task-def.outputs.task-definition }}
          service: files-worker-dev
          cluster: dev
          wait-for-service-stability: true

  # Records deployment markers in New Relic for both services.
  newrelic:
    needs: [ deploy-web, deploy-worker ]
    runs-on: ubuntu-latest
    steps:
      - name: Create New Relic deployment marker for Web
        uses: newrelic/deployment-marker-action@v1
        with:
          accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
          apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
          applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WEB_DEV }}
          revision: "${{ github.sha }}"
          description: "Automated Deployment via Github Actions"
          user: "${{ github.actor }}"
      - name: Create New Relic deployment marker for Worker
        uses: newrelic/deployment-marker-action@v1
        with:
          accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
          apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
          applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WORKER_DEV }}
          revision: "${{ github.sha }}"
          description: "Automated Deployment via Github Actions"
          user: "${{ github.actor }}"

  # Announces the deployment in Discord.
  notify_discord:
    needs: [ deploy-web, deploy-worker ]
    runs-on: ubuntu-latest
    steps:
      - name: Run Discord Webhook
        uses: johnnyhuy/actions-discord-git-webhook@main
        with:
          webhook_url: ${{ secrets.DISCORD_WEBHOOK_URL }}

3
.gitignore vendored
View file

@ -15,3 +15,6 @@ newrelic_agent.log
!.yarn/unplugged
!.yarn/sdks
!.yarn/versions
packages/files/uploads/*
!packages/files/uploads/.gitkeep

472
.pnp.cjs generated
View file

@ -24,6 +24,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"name": "@standardnotes/auth-server",\
"reference": "workspace:packages/auth"\
},\
{\
"name": "@standardnotes/files-server",\
"reference": "workspace:packages/files"\
},\
{\
"name": "@standardnotes/scheduler-server",\
"reference": "workspace:packages/scheduler"\
@ -37,6 +41,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"ignorePatternData": "(^(?:\\\\.yarn\\\\/sdks(?:\\\\/(?!\\\\.{1,2}(?:\\\\/|$))(?:(?:(?!(?:^|\\\\/)\\\\.{1,2}(?:\\\\/|$)).)*?)|$))$)",\
"fallbackExclusionList": [\
["@standardnotes/auth-server", ["workspace:packages/auth"]],\
["@standardnotes/files-server", ["workspace:packages/files"]],\
["@standardnotes/scheduler-server", ["workspace:packages/scheduler"]],\
["@standardnotes/server-monorepo", ["workspace:."]],\
["@standardnotes/syncing-server", ["workspace:packages/syncing-server"]]\
@ -1719,6 +1724,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@newrelic/aws-sdk", [\
["npm:3.1.0", {\
"packageLocation": "./.yarn/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip/node_modules/@newrelic/aws-sdk/",\
"packageDependencies": [\
["@newrelic/aws-sdk", "npm:3.1.0"]\
],\
"linkType": "SOFT"\
}],\
["npm:4.1.2", {\
"packageLocation": "./.yarn/cache/@newrelic-aws-sdk-npm-4.1.2-9930120a02-610f6353a7.zip/node_modules/@newrelic/aws-sdk/",\
"packageDependencies": [\
@ -1753,9 +1765,29 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"newrelic"\
],\
"linkType": "HARD"\
}],\
["virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:3.1.0", {\
"packageLocation": "./.yarn/__virtual__/@newrelic-aws-sdk-virtual-9720173dde/0/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip/node_modules/@newrelic/aws-sdk/",\
"packageDependencies": [\
["@newrelic/aws-sdk", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:3.1.0"],\
["@types/newrelic", null],\
["newrelic", "npm:7.5.2"]\
],\
"packagePeers": [\
"@types/newrelic",\
"newrelic"\
],\
"linkType": "HARD"\
}]\
]],\
["@newrelic/koa", [\
["npm:5.0.0", {\
"packageLocation": "./.yarn/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip/node_modules/@newrelic/koa/",\
"packageDependencies": [\
["@newrelic/koa", "npm:5.0.0"]\
],\
"linkType": "SOFT"\
}],\
["npm:6.1.2", {\
"packageLocation": "./.yarn/cache/@newrelic-koa-npm-6.1.2-df0f7c71b5-e269d37b13.zip/node_modules/@newrelic/koa/",\
"packageDependencies": [\
@ -1788,9 +1820,33 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"newrelic"\
],\
"linkType": "HARD"\
}],\
["virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:5.0.0", {\
"packageLocation": "./.yarn/__virtual__/@newrelic-koa-virtual-2873d18af2/0/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip/node_modules/@newrelic/koa/",\
"packageDependencies": [\
["@newrelic/koa", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:5.0.0"],\
["@types/newrelic", null],\
["methods", "npm:1.1.2"],\
["newrelic", "npm:7.5.2"]\
],\
"packagePeers": [\
"@types/newrelic",\
"newrelic"\
],\
"linkType": "HARD"\
}]\
]],\
["@newrelic/native-metrics", [\
["npm:6.0.2", {\
"packageLocation": "./.yarn/unplugged/@newrelic-native-metrics-npm-6.0.2-805c5534f5/node_modules/@newrelic/native-metrics/",\
"packageDependencies": [\
["@newrelic/native-metrics", "npm:6.0.2"],\
["nan", "npm:2.16.0"],\
["node-gyp", "npm:9.0.0"],\
["semver", "npm:5.7.1"]\
],\
"linkType": "HARD"\
}],\
["npm:7.0.2", {\
"packageLocation": "./.yarn/unplugged/@newrelic-native-metrics-npm-7.0.2-b4dcec08eb/node_modules/@newrelic/native-metrics/",\
"packageDependencies": [\
@ -1825,6 +1881,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@newrelic/superagent", [\
["npm:4.0.0", {\
"packageLocation": "./.yarn/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip/node_modules/@newrelic/superagent/",\
"packageDependencies": [\
["@newrelic/superagent", "npm:4.0.0"]\
],\
"linkType": "SOFT"\
}],\
["npm:5.1.1", {\
"packageLocation": "./.yarn/cache/@newrelic-superagent-npm-5.1.1-0d3c1fccf8-b43f7b9bb6.zip/node_modules/@newrelic/superagent/",\
"packageDependencies": [\
@ -1857,6 +1920,20 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"newrelic"\
],\
"linkType": "HARD"\
}],\
["virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:4.0.0", {\
"packageLocation": "./.yarn/__virtual__/@newrelic-superagent-virtual-05297db2ec/0/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip/node_modules/@newrelic/superagent/",\
"packageDependencies": [\
["@newrelic/superagent", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:4.0.0"],\
["@types/newrelic", null],\
["methods", "npm:1.1.2"],\
["newrelic", "npm:7.5.2"]\
],\
"packagePeers": [\
"@types/newrelic",\
"newrelic"\
],\
"linkType": "HARD"\
}]\
]],\
["@newrelic/winston-enricher", [\
@ -2588,6 +2665,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["@standardnotes/config", [\
["npm:2.0.1", {\
"packageLocation": "./.yarn/cache/@standardnotes-config-npm-2.0.1-5f34962133-5284e034f2.zip/node_modules/@standardnotes/config/",\
"packageDependencies": [\
["@standardnotes/config", "npm:2.0.1"],\
["@typescript-eslint/eslint-plugin", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\
["@typescript-eslint/parser", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["@standardnotes/domain-events", [\
["npm:2.32.2", {\
"packageLocation": "./.yarn/cache/@standardnotes-domain-events-npm-2.32.2-73adf7a999-54da5fc885.zip/node_modules/@standardnotes/domain-events/",\
@ -2639,6 +2727,58 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["@standardnotes/files-server", [\
["workspace:packages/files", {\
"packageLocation": "./packages/files/",\
"packageDependencies": [\
["@standardnotes/files-server", "workspace:packages/files"],\
["@newrelic/native-metrics", "npm:7.0.2"],\
["@sentry/node", "npm:6.19.7"],\
["@standardnotes/auth", "npm:3.19.3"],\
["@standardnotes/common", "npm:1.23.0"],\
["@standardnotes/config", "npm:2.0.1"],\
["@standardnotes/domain-events", "npm:2.32.2"],\
["@standardnotes/domain-events-infra", "npm:1.5.2"],\
["@standardnotes/sncrypto-common", "npm:1.9.0"],\
["@standardnotes/sncrypto-node", "npm:1.8.3"],\
["@standardnotes/time", "npm:1.7.0"],\
["@types/connect-busboy", "npm:1.0.0"],\
["@types/cors", "npm:2.8.12"],\
["@types/express", "npm:4.17.13"],\
["@types/ioredis", "npm:4.28.10"],\
["@types/jest", "npm:28.1.3"],\
["@types/jsonwebtoken", "npm:8.5.8"],\
["@types/newrelic", "npm:7.0.3"],\
["@types/prettyjson", "npm:0.0.29"],\
["@types/uuid", "npm:8.3.4"],\
["@typescript-eslint/eslint-plugin", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:5.29.0"],\
["aws-sdk", "npm:2.1158.0"],\
["connect-busboy", "npm:1.0.0"],\
["cors", "npm:2.8.5"],\
["dayjs", "npm:1.11.3"],\
["dotenv", "npm:8.6.0"],\
["eslint", "npm:8.18.0"],\
["eslint-plugin-prettier", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:4.0.0"],\
["express", "npm:4.18.1"],\
["express-winston", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:4.2.0"],\
["helmet", "npm:4.6.0"],\
["inversify", "npm:6.0.1"],\
["inversify-express-utils", "npm:6.4.3"],\
["ioredis", "npm:5.0.6"],\
["jest", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:28.1.1"],\
["jsonwebtoken", "npm:8.5.1"],\
["newrelic", "npm:7.5.2"],\
["nodemon", "npm:2.0.16"],\
["prettyjson", "npm:1.2.5"],\
["reflect-metadata", "npm:0.1.13"],\
["ts-jest", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:28.0.5"],\
["ts-node", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:10.8.1"],\
["uuid", "npm:8.3.2"],\
["winston", "npm:3.7.2"]\
],\
"linkType": "SOFT"\
}]\
]],\
["@standardnotes/models", [\
["npm:1.11.10", {\
"packageLocation": "./.yarn/cache/@standardnotes-models-npm-1.11.10-e4b5e4717d-d69fd3940e.zip/node_modules/@standardnotes/models/",\
@ -2989,6 +3129,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["@types/busboy", [\
["npm:1.5.0", {\
"packageLocation": "./.yarn/cache/@types-busboy-npm-1.5.0-0e24e7f08d-ffa7bf25c0.zip/node_modules/@types/busboy/",\
"packageDependencies": [\
["@types/busboy", "npm:1.5.0"],\
["@types/node", "npm:18.0.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["@types/connect", [\
["npm:3.4.35", {\
"packageLocation": "./.yarn/cache/@types-connect-npm-3.4.35-7337eee0a3-fe81351470.zip/node_modules/@types/connect/",\
@ -2999,6 +3149,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["@types/connect-busboy", [\
["npm:1.0.0", {\
"packageLocation": "./.yarn/cache/@types-connect-busboy-npm-1.0.0-fca702448d-ccbf7bc42d.zip/node_modules/@types/connect-busboy/",\
"packageDependencies": [\
["@types/connect-busboy", "npm:1.0.0"],\
["@types/busboy", "npm:1.5.0"],\
["@types/express", "npm:4.17.13"],\
["@types/node", "npm:18.0.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["@types/cors", [\
["npm:2.8.12", {\
"packageLocation": "./.yarn/cache/@types-cors-npm-2.8.12-ff52e8e514-8c45f112c7.zip/node_modules/@types/cors/",\
@ -3337,6 +3499,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@typescript-eslint/eslint-plugin", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip/node_modules/@typescript-eslint/eslint-plugin/",\
"packageDependencies": [\
["@typescript-eslint/eslint-plugin", "npm:4.33.0"]\
],\
"linkType": "SOFT"\
}],\
["npm:5.29.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-eslint-plugin-npm-5.29.0-d7e482bb3e-b1022a640f.zip/node_modules/@typescript-eslint/eslint-plugin/",\
"packageDependencies": [\
@ -3403,9 +3572,73 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"typescript"\
],\
"linkType": "HARD"\
}],\
["virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0", {\
"packageLocation": "./.yarn/__virtual__/@typescript-eslint-eslint-plugin-virtual-56ea46a0fe/0/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip/node_modules/@typescript-eslint/eslint-plugin/",\
"packageDependencies": [\
["@typescript-eslint/eslint-plugin", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\
["@types/eslint", null],\
["@types/typescript", null],\
["@types/typescript-eslint__parser", null],\
["@typescript-eslint/experimental-utils", "virtual:56ea46a0fe17b3df61f8f63a15a082b4da2a385d6e774395a132d9a90b9ce8a1ea4c8896dbc69243dfdd2325db5c22821bfa276cdbaddb1ec4f5f1efddea3e20#npm:4.33.0"],\
["@typescript-eslint/parser", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\
["@typescript-eslint/scope-manager", "npm:4.33.0"],\
["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\
["eslint", null],\
["functional-red-black-tree", "npm:1.0.1"],\
["ignore", "npm:5.2.0"],\
["regexpp", "npm:3.2.0"],\
["semver", "npm:7.3.7"],\
["tsutils", "virtual:e64d2841693653abb2dee666d19406912f5e913a8081a709c081d9877d2f39987ff853b7cd736901a2df59af98328f7249f3db0da01abf060cf1d858d4d4e43b#npm:3.21.0"],\
["typescript", null]\
],\
"packagePeers": [\
"@types/eslint",\
"@types/typescript-eslint__parser",\
"@types/typescript",\
"@typescript-eslint/parser",\
"eslint",\
"typescript"\
],\
"linkType": "HARD"\
}]\
]],\
["@typescript-eslint/experimental-utils", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip/node_modules/@typescript-eslint/experimental-utils/",\
"packageDependencies": [\
["@typescript-eslint/experimental-utils", "npm:4.33.0"]\
],\
"linkType": "SOFT"\
}],\
["virtual:56ea46a0fe17b3df61f8f63a15a082b4da2a385d6e774395a132d9a90b9ce8a1ea4c8896dbc69243dfdd2325db5c22821bfa276cdbaddb1ec4f5f1efddea3e20#npm:4.33.0", {\
"packageLocation": "./.yarn/__virtual__/@typescript-eslint-experimental-utils-virtual-3b1d487b65/0/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip/node_modules/@typescript-eslint/experimental-utils/",\
"packageDependencies": [\
["@typescript-eslint/experimental-utils", "virtual:56ea46a0fe17b3df61f8f63a15a082b4da2a385d6e774395a132d9a90b9ce8a1ea4c8896dbc69243dfdd2325db5c22821bfa276cdbaddb1ec4f5f1efddea3e20#npm:4.33.0"],\
["@types/eslint", null],\
["@types/json-schema", "npm:7.0.11"],\
["@typescript-eslint/scope-manager", "npm:4.33.0"],\
["@typescript-eslint/types", "npm:4.33.0"],\
["@typescript-eslint/typescript-estree", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0"],\
["eslint", null],\
["eslint-scope", "npm:5.1.1"],\
["eslint-utils", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0"]\
],\
"packagePeers": [\
"@types/eslint",\
"eslint"\
],\
"linkType": "HARD"\
}]\
]],\
["@typescript-eslint/parser", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip/node_modules/@typescript-eslint/parser/",\
"packageDependencies": [\
["@typescript-eslint/parser", "npm:4.33.0"]\
],\
"linkType": "SOFT"\
}],\
["npm:5.29.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-parser-npm-5.29.0-491a7f9690-7805796638.zip/node_modules/@typescript-eslint/parser/",\
"packageDependencies": [\
@ -3413,6 +3646,27 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
],\
"linkType": "SOFT"\
}],\
["virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0", {\
"packageLocation": "./.yarn/__virtual__/@typescript-eslint-parser-virtual-636bc1eaeb/0/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip/node_modules/@typescript-eslint/parser/",\
"packageDependencies": [\
["@typescript-eslint/parser", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\
["@types/eslint", null],\
["@types/typescript", null],\
["@typescript-eslint/scope-manager", "npm:4.33.0"],\
["@typescript-eslint/types", "npm:4.33.0"],\
["@typescript-eslint/typescript-estree", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0"],\
["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\
["eslint", null],\
["typescript", null]\
],\
"packagePeers": [\
"@types/eslint",\
"@types/typescript",\
"eslint",\
"typescript"\
],\
"linkType": "HARD"\
}],\
["virtual:8859b278716fedf3e7458b5628625f7e35678c418626878559a0b816445001b7e24c55546f4677ba4c20b521aa0cf52cc33ac07deff171e383ada6eeab69933f#npm:5.29.0", {\
"packageLocation": "./.yarn/__virtual__/@typescript-eslint-parser-virtual-451c3112c8/0/cache/@typescript-eslint-parser-npm-5.29.0-491a7f9690-7805796638.zip/node_modules/@typescript-eslint/parser/",\
"packageDependencies": [\
@ -3436,6 +3690,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@typescript-eslint/scope-manager", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-scope-manager-npm-4.33.0-28014c179d-9a25fb7ba7.zip/node_modules/@typescript-eslint/scope-manager/",\
"packageDependencies": [\
["@typescript-eslint/scope-manager", "npm:4.33.0"],\
["@typescript-eslint/types", "npm:4.33.0"],\
["@typescript-eslint/visitor-keys", "npm:4.33.0"]\
],\
"linkType": "HARD"\
}],\
["npm:5.29.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-scope-manager-npm-5.29.0-03a6d28ed2-540642bef9.zip/node_modules/@typescript-eslint/scope-manager/",\
"packageDependencies": [\
@ -3496,6 +3759,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@typescript-eslint/types", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-types-npm-4.33.0-9e9b956afa-3baae1ca35.zip/node_modules/@typescript-eslint/types/",\
"packageDependencies": [\
["@typescript-eslint/types", "npm:4.33.0"]\
],\
"linkType": "HARD"\
}],\
["npm:5.29.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-types-npm-5.29.0-2bea7f0c9b-982ecdd691.zip/node_modules/@typescript-eslint/types/",\
"packageDependencies": [\
@ -3505,6 +3775,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@typescript-eslint/typescript-estree", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip/node_modules/@typescript-eslint/typescript-estree/",\
"packageDependencies": [\
["@typescript-eslint/typescript-estree", "npm:4.33.0"]\
],\
"linkType": "SOFT"\
}],\
["npm:5.29.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-typescript-estree-npm-5.29.0-f23de2ab5c-b91107a9fc.zip/node_modules/@typescript-eslint/typescript-estree/",\
"packageDependencies": [\
@ -3512,6 +3789,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
],\
"linkType": "SOFT"\
}],\
["virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0", {\
"packageLocation": "./.yarn/__virtual__/@typescript-eslint-typescript-estree-virtual-ec62c0bda6/0/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip/node_modules/@typescript-eslint/typescript-estree/",\
"packageDependencies": [\
["@typescript-eslint/typescript-estree", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0"],\
["@types/typescript", null],\
["@typescript-eslint/types", "npm:4.33.0"],\
["@typescript-eslint/visitor-keys", "npm:4.33.0"],\
["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\
["globby", "npm:11.1.0"],\
["is-glob", "npm:4.0.3"],\
["semver", "npm:7.3.7"],\
["tsutils", "virtual:e64d2841693653abb2dee666d19406912f5e913a8081a709c081d9877d2f39987ff853b7cd736901a2df59af98328f7249f3db0da01abf060cf1d858d4d4e43b#npm:3.21.0"],\
["typescript", null]\
],\
"packagePeers": [\
"@types/typescript",\
"typescript"\
],\
"linkType": "HARD"\
}],\
["virtual:451c3112c8ebc24954be5135e65b7a370326adef1f6bb7aaca3ef2abc346ee165ef171b721a7207548ab6a19505983a443fc07f41cc553d4c9c5cddd04862b50#npm:5.29.0", {\
"packageLocation": "./.yarn/__virtual__/@typescript-eslint-typescript-estree-virtual-7e6283c452/0/cache/@typescript-eslint-typescript-estree-npm-5.29.0-f23de2ab5c-b91107a9fc.zip/node_modules/@typescript-eslint/typescript-estree/",\
"packageDependencies": [\
@ -3591,7 +3888,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
["@typescript-eslint/typescript-estree", "virtual:4ec458b53cfcb38d153394fe4d0300908a12ce721ae6026f1e2d7bbe8409ed98079b29d9688a9eb93463ace5dbaac7d454b12c4582b1cd0b1d8210588cf0cb1c#npm:5.29.0"],\
["eslint", null],\
["eslint-scope", "npm:5.1.1"],\
["eslint-utils", "virtual:9b3cc2e468ebc82b101c5313a9afa58bf6c93ab196f710844b44e247fc606cd503de5b07cdee6c592a841949dbe5daecc3f46a7ae43ee5bbf7fe046d76ec335e#npm:3.0.0"]\
["eslint-utils", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0"]\
],\
"packagePeers": [\
"@types/eslint",\
@ -3601,6 +3898,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["@typescript-eslint/visitor-keys", [\
["npm:4.33.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-visitor-keys-npm-4.33.0-8b7e72a3c9-59953e474a.zip/node_modules/@typescript-eslint/visitor-keys/",\
"packageDependencies": [\
["@typescript-eslint/visitor-keys", "npm:4.33.0"],\
["@typescript-eslint/types", "npm:4.33.0"],\
["eslint-visitor-keys", "npm:2.1.0"]\
],\
"linkType": "HARD"\
}],\
["npm:5.29.0", {\
"packageLocation": "./.yarn/cache/@typescript-eslint-visitor-keys-npm-5.29.0-fe23f55f18-15f228ad9f.zip/node_modules/@typescript-eslint/visitor-keys/",\
"packageDependencies": [\
@ -3701,6 +4007,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["agent-base", [\
["npm:5.1.1", {\
"packageLocation": "./.yarn/cache/agent-base-npm-5.1.1-d451a4ad62-61ae789f30.zip/node_modules/agent-base/",\
"packageDependencies": [\
["agent-base", "npm:5.1.1"]\
],\
"linkType": "HARD"\
}],\
["npm:6.0.2", {\
"packageLocation": "./.yarn/cache/agent-base-npm-6.0.2-428f325a93-f52b6872cc.zip/node_modules/agent-base/",\
"packageDependencies": [\
@ -4381,6 +4694,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["busboy", [\
["npm:1.6.0", {\
"packageLocation": "./.yarn/cache/busboy-npm-1.6.0-ebb5cbb04b-32801e2c01.zip/node_modules/busboy/",\
"packageDependencies": [\
["busboy", "npm:1.6.0"],\
["streamsearch", "npm:1.1.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["byte-size", [\
["npm:7.0.1", {\
"packageLocation": "./.yarn/cache/byte-size-npm-7.0.1-cda9f76d28-6791663a6d.zip/node_modules/byte-size/",\
@ -4916,6 +5239,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["connect-busboy", [\
["npm:1.0.0", {\
"packageLocation": "./.yarn/cache/connect-busboy-npm-1.0.0-9908d1785d-e4a8cece06.zip/node_modules/connect-busboy/",\
"packageDependencies": [\
["connect-busboy", "npm:1.0.0"],\
["busboy", "npm:1.6.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["console-control-strings", [\
["npm:1.1.0", {\
"packageLocation": "./.yarn/cache/console-control-strings-npm-1.1.0-e3160e5275-8755d76787.zip/node_modules/console-control-strings/",\
@ -5568,6 +5901,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
["dotenv", "npm:8.2.0"]\
],\
"linkType": "HARD"\
}],\
["npm:8.6.0", {\
"packageLocation": "./.yarn/cache/dotenv-npm-8.6.0-2ce3e9f7bb-38e902c80b.zip/node_modules/dotenv/",\
"packageDependencies": [\
["dotenv", "npm:8.6.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["duplexer", [\
@ -5906,12 +6246,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
],\
"linkType": "SOFT"\
}],\
["virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0", {\
"packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-c2e00a0f83/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\
["virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0", {\
"packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-19087eaf4f/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\
"packageDependencies": [\
["eslint-utils", "virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0"],\
["eslint-utils", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0"],\
["@types/eslint", null],\
["eslint", "npm:8.18.0"],\
["eslint", null],\
["eslint-visitor-keys", "npm:2.1.0"]\
],\
"packagePeers": [\
@ -5920,12 +6260,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
],\
"linkType": "HARD"\
}],\
["virtual:9b3cc2e468ebc82b101c5313a9afa58bf6c93ab196f710844b44e247fc606cd503de5b07cdee6c592a841949dbe5daecc3f46a7ae43ee5bbf7fe046d76ec335e#npm:3.0.0", {\
"packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-d9a84e87ea/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\
["virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0", {\
"packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-c2e00a0f83/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\
"packageDependencies": [\
["eslint-utils", "virtual:9b3cc2e468ebc82b101c5313a9afa58bf6c93ab196f710844b44e247fc606cd503de5b07cdee6c592a841949dbe5daecc3f46a7ae43ee5bbf7fe046d76ec335e#npm:3.0.0"],\
["eslint-utils", "virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0"],\
["@types/eslint", null],\
["eslint", null],\
["eslint", "npm:8.18.0"],\
["eslint-visitor-keys", "npm:2.1.0"]\
],\
"packagePeers": [\
@ -6162,6 +6502,30 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["express-winston", [\
["npm:4.2.0", {\
"packageLocation": "./.yarn/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip/node_modules/express-winston/",\
"packageDependencies": [\
["express-winston", "npm:4.2.0"]\
],\
"linkType": "SOFT"\
}],\
["virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:4.2.0", {\
"packageLocation": "./.yarn/__virtual__/express-winston-virtual-7edb98a399/0/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip/node_modules/express-winston/",\
"packageDependencies": [\
["express-winston", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:4.2.0"],\
["@types/winston", null],\
["chalk", "npm:2.4.2"],\
["lodash", "npm:4.17.21"],\
["winston", "npm:3.7.2"]\
],\
"packagePeers": [\
"@types/winston",\
"winston"\
],\
"linkType": "HARD"\
}]\
]],\
["external-editor", [\
["npm:3.1.0", {\
"packageLocation": "./.yarn/cache/external-editor-npm-3.1.0-878e7807af-1c2a616a73.zip/node_modules/external-editor/",\
@ -6866,6 +7230,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
["helmet", "npm:4.3.1"]\
],\
"linkType": "HARD"\
}],\
["npm:4.6.0", {\
"packageLocation": "./.yarn/cache/helmet-npm-4.6.0-f244fd965c-139ad678d1.zip/node_modules/helmet/",\
"packageDependencies": [\
["helmet", "npm:4.6.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["highlight.js", [\
@ -6990,6 +7361,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["https-proxy-agent", [\
["npm:4.0.0", {\
"packageLocation": "./.yarn/cache/https-proxy-agent-npm-4.0.0-9021ec873f-19471d5aae.zip/node_modules/https-proxy-agent/",\
"packageDependencies": [\
["https-proxy-agent", "npm:4.0.0"],\
["agent-base", "npm:5.1.1"],\
["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"]\
],\
"linkType": "HARD"\
}],\
["npm:5.0.1", {\
"packageLocation": "./.yarn/cache/https-proxy-agent-npm-5.0.1-42d65f358e-571fccdf38.zip/node_modules/https-proxy-agent/",\
"packageDependencies": [\
@ -9252,6 +9632,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
}]\
]],\
["newrelic", [\
["npm:7.5.2", {\
"packageLocation": "./.yarn/cache/newrelic-npm-7.5.2-b949bcba7c-f6c67dbb7d.zip/node_modules/newrelic/",\
"packageDependencies": [\
["newrelic", "npm:7.5.2"],\
["@grpc/grpc-js", "npm:1.6.7"],\
["@grpc/proto-loader", "npm:0.5.6"],\
["@newrelic/aws-sdk", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:3.1.0"],\
["@newrelic/koa", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:5.0.0"],\
["@newrelic/native-metrics", "npm:6.0.2"],\
["@newrelic/superagent", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:4.0.0"],\
["@tyriar/fibonacci-heap", "npm:2.0.9"],\
["async", "npm:3.2.4"],\
["concat-stream", "npm:2.0.0"],\
["https-proxy-agent", "npm:4.0.0"],\
["json-stringify-safe", "npm:5.0.1"],\
["readable-stream", "npm:3.6.0"],\
["semver", "npm:5.7.1"]\
],\
"linkType": "HARD"\
}],\
["npm:8.14.1", {\
"packageLocation": "./.yarn/cache/newrelic-npm-8.14.1-b659d4d19c-cd12bb2ac9.zip/node_modules/newrelic/",\
"packageDependencies": [\
@ -10223,6 +10623,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
["minimist", "npm:1.2.6"]\
],\
"linkType": "HARD"\
}],\
["npm:1.2.5", {\
"packageLocation": "./.yarn/cache/prettyjson-npm-1.2.5-a72b7bf823-e36e8ae4f7.zip/node_modules/prettyjson/",\
"packageDependencies": [\
["prettyjson", "npm:1.2.5"],\
["colors", "npm:1.4.0"],\
["minimist", "npm:1.2.6"]\
],\
"linkType": "HARD"\
}]\
]],\
["proc-log", [\
@ -11381,6 +11790,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"linkType": "HARD"\
}]\
]],\
["streamsearch", [\
["npm:1.1.0", {\
"packageLocation": "./.yarn/cache/streamsearch-npm-1.1.0-fc3ad6536d-1cce16cea8.zip/node_modules/streamsearch/",\
"packageDependencies": [\
["streamsearch", "npm:1.1.0"]\
],\
"linkType": "HARD"\
}]\
]],\
["strict-uri-encode", [\
["npm:2.0.0", {\
"packageLocation": "./.yarn/cache/strict-uri-encode-npm-2.0.0-1ec3189376-eaac4cf978.zip/node_modules/strict-uri-encode/",\
@ -11969,6 +12387,42 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) {
"typescript"\
],\
"linkType": "HARD"\
}],\
["virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:10.8.1", {\
"packageLocation": "./.yarn/__virtual__/ts-node-virtual-28037b75c9/0/cache/ts-node-npm-10.8.1-24280b0982-7d1aa7aa3a.zip/node_modules/ts-node/",\
"packageDependencies": [\
["ts-node", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:10.8.1"],\
["@cspotcode/source-map-support", "npm:0.8.1"],\
["@swc/core", null],\
["@swc/wasm", null],\
["@tsconfig/node10", "npm:1.0.9"],\
["@tsconfig/node12", "npm:1.0.11"],\
["@tsconfig/node14", "npm:1.0.3"],\
["@tsconfig/node16", "npm:1.0.3"],\
["@types/node", null],\
["@types/swc__core", null],\
["@types/swc__wasm", null],\
["@types/typescript", null],\
["acorn", "npm:8.7.1"],\
["acorn-walk", "npm:8.2.0"],\
["arg", "npm:4.1.3"],\
["create-require", "npm:1.1.1"],\
["diff", "npm:4.0.2"],\
["make-error", "npm:1.3.6"],\
["typescript", null],\
["v8-compile-cache-lib", "npm:3.0.1"],\
["yn", "npm:3.1.1"]\
],\
"packagePeers": [\
"@swc/core",\
"@swc/wasm",\
"@types/node",\
"@types/swc__core",\
"@types/swc__wasm",\
"@types/typescript",\
"typescript"\
],\
"linkType": "HARD"\
}]\
]],\
["tslib", [\

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View file

@ -15,21 +15,26 @@
"lint:auth": "yarn workspace @standardnotes/auth-server lint",
"lint:scheduler": "yarn workspace @standardnotes/scheduler-server lint",
"lint:syncing-server": "yarn workspace @standardnotes/syncing-server lint",
"lint:files": "yarn workspace @standardnotes/files-server lint",
"test": "yarn workspaces foreach -p -j 10 --verbose run test",
"test:auth": "yarn workspace @standardnotes/auth-server test",
"test:scheduler": "yarn workspace @standardnotes/scheduler-server test",
"test:syncing-server": "yarn workspace @standardnotes/syncing-server test",
"test:files": "yarn workspace @standardnotes/files-server test",
"clean": "yarn workspaces foreach -p --verbose run clean",
"setup:env": "yarn workspaces foreach -p --verbose run setup:env",
"build": "yarn workspaces foreach -pt -j 10 --verbose run build",
"build:auth": "yarn workspace @standardnotes/auth-server build",
"build:scheduler": "yarn workspace @standardnotes/scheduler-server build",
"build:syncing-server": "yarn workspace @standardnotes/syncing-server build",
"build:files": "yarn workspace @standardnotes/files-server build",
"start:auth": "yarn workspace @standardnotes/auth-server start",
"start:auth-worker": "yarn workspace @standardnotes/auth-server worker",
"start:scheduler": "yarn workspace @standardnotes/scheduler-server worker",
"start:syncing-server": "yarn workspace @standardnotes/syncing-server start",
"start:syncing-server-worker": "yarn workspace @standardnotes/syncing-server worker",
"start:files": "yarn workspace @standardnotes/files-server start",
"start:files-worker": "yarn workspace @standardnotes/files-server worker",
"release:beta": "lerna version --conventional-prerelease --conventional-commits --yes -m \"chore(release): publish\""
},
"devDependencies": {

View file

@ -0,0 +1,35 @@
LOG_LEVEL=debug
NODE_ENV=development
VERSION=development
PORT=3000
REDIS_URL=redis://cache
REDIS_EVENTS_CHANNEL=events
VALET_TOKEN_SECRET=change-me-!
MAX_CHUNK_BYTES=1000000
# (Optional) New Relic Setup
NEW_RELIC_ENABLED=false
NEW_RELIC_APP_NAME=Files Server
NEW_RELIC_LICENSE_KEY=
NEW_RELIC_NO_CONFIG_FILE=true
NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=false
NEW_RELIC_LOG_ENABLED=false
NEW_RELIC_LOG_LEVEL=info
# (Optional) AWS Setup
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
S3_BUCKET_NAME=
S3_AWS_REGION=
S3_ENDPOINT=
SNS_TOPIC_ARN=
SNS_AWS_REGION=
SQS_QUEUE_URL=
SQS_AWS_REGION=
# (Optional) File upload path (relative to root directory)
FILE_UPLOAD_PATH=

View file

@ -0,0 +1,3 @@
dist
test-setup.ts
data

6
packages/files/.eslintrc Normal file
View file

@ -0,0 +1,6 @@
{
  // Inherit the repo-wide ESLint configuration; type-aware rules parse this
  // workspace via the dedicated lint tsconfig (linter.tsconfig.json).
  "extends": "../../.eslintrc",
  "parserOptions": {
    "project": "./linter.tsconfig.json"
  }
}

25
packages/files/Dockerfile Normal file
View file

@ -0,0 +1,25 @@
# Build stage: installs workspace dependencies, including those that need a
# native toolchain (hence alpine-sdk/python3).
FROM node:16.15.1-alpine AS builder

# Install dependencies for building native libraries
RUN apk add --update git openssh-client python3 alpine-sdk

WORKDIR /workspace

# docker-build plugin copies everything needed for `yarn install` to `manifests` folder.
COPY manifests ./

RUN yarn install --immutable

# Runtime stage: plain alpine node image without the build toolchain.
FROM node:16.15.1-alpine

WORKDIR /workspace

# Copy the installed dependencies from the previous stage.
COPY --from=builder /workspace ./

# docker-build plugin runs `yarn pack` in all workspace dependencies and copies them to `packs` folder.
COPY packs ./

# Fix: this is the files package — the entrypoint lives under packages/files.
# The previous path (packages/auth/docker/entrypoint.sh) was a copy-paste
# leftover from the auth Dockerfile.
ENTRYPOINT [ "/workspace/packages/files/docker/entrypoint.sh" ]

CMD [ "start-web" ]

View file

@ -0,0 +1,104 @@
import 'reflect-metadata'
import 'newrelic'

import * as Sentry from '@sentry/node'
import * as busboy from 'connect-busboy'

// Importing the controllers registers their routes with inversify-express-utils.
import '../src/Controller/HealthCheckController'
import '../src/Controller/FilesController'

import * as helmet from 'helmet'
import * as cors from 'cors'
import { urlencoded, json, raw, Request, Response, NextFunction, RequestHandler, ErrorRequestHandler } from 'express'
import * as winston from 'winston'
import { InversifyExpressServer } from 'inversify-express-utils'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'

const container = new ContainerConfigLoader()
void container.load().then((container) => {
  const env: Env = new Env()
  env.load()

  const server = new InversifyExpressServer(container)

  server.setConfig((app) => {
    // Advertise the running service version on every response.
    app.use((_request: Request, response: Response, next: NextFunction) => {
      response.setHeader('X-Files-Version', container.get(TYPES.VERSION))
      next()
    })
    // Multipart/stream parsing for uploads; 2 MB internal stream buffer.
    app.use(
      busboy({
        highWaterMark: 2 * 1024 * 1024,
      }),
    )
    /* eslint-disable */
    // NOTE(review): defaultSrc "https: 'self'" packs two sources into one
    // string — helmet expects one source per array element; confirm intent.
    app.use(helmet({
      contentSecurityPolicy: {
        directives: {
          defaultSrc: ["https: 'self'"],
          baseUri: ["'self'"],
          childSrc: ["*", "blob:"],
          connectSrc: ["*"],
          fontSrc: ["*", "'self'"],
          formAction: ["'self'"],
          frameAncestors: ["*", "*.standardnotes.org", "*.standardnotes.com"],
          frameSrc: ["*", "blob:"],
          imgSrc: ["'self'", "*", "data:"],
          manifestSrc: ["'self'"],
          mediaSrc: ["'self'"],
          objectSrc: ["'self'"],
          scriptSrc: ["'self'"],
          styleSrc: ["'self'"]
        }
      }
    }))
    /* eslint-enable */
    app.use(json({ limit: '50mb' }))
    app.use(raw({ limit: '50mb', type: 'application/octet-stream' }))
    app.use(urlencoded({ extended: true, limit: '50mb' }))
    // Expose range-related headers so browser clients can read them on
    // cross-origin chunked downloads.
    app.use(
      cors({
        exposedHeaders: ['Content-Range', 'Accept-Ranges'],
      }),
    )

    // Sentry is optional; only wired when a DSN is configured.
    if (env.get('SENTRY_DSN', true)) {
      Sentry.init({
        dsn: env.get('SENTRY_DSN'),
        integrations: [new Sentry.Integrations.Http({ tracing: false, breadcrumbs: true })],
        tracesSampleRate: 0,
      })

      app.use(Sentry.Handlers.requestHandler() as RequestHandler)
    }
  })

  const logger: winston.Logger = container.get(TYPES.Logger)

  server.setErrorConfig((app) => {
    if (env.get('SENTRY_DSN', true)) {
      app.use(Sentry.Handlers.errorHandler() as ErrorRequestHandler)
    }

    // Last-resort error handler: log the stack, return an opaque 500 payload.
    app.use((error: Record<string, unknown>, _request: Request, response: Response, _next: NextFunction) => {
      logger.error(error.stack)

      response.status(500).send({
        error: {
          message:
            "Unfortunately, we couldn't handle your request. Please try again or contact our support if the error persists.",
        },
      })
    })
  })

  const serverInstance = server.build()

  serverInstance.listen(env.get('PORT'))

  // Fix: read the port through Env, consistent with the listen() call above
  // (previously interpolated process.env.PORT directly).
  logger.info(`Server started on port ${env.get('PORT')}`)
})

View file

@ -0,0 +1,29 @@
import 'reflect-metadata'
import 'newrelic'

import { Logger } from 'winston'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'
import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events'
import * as dayjs from 'dayjs'
import * as utc from 'dayjs/plugin/utc'

// Worker entry point: wires the container, then starts consuming domain events.
const loader = new ContainerConfigLoader()

void (async () => {
  const container = await loader.load()

  dayjs.extend(utc)

  const env: Env = new Env()
  env.load()

  const logger: Logger = container.get(TYPES.Logger)
  logger.info('Starting worker...')

  const subscriberFactory: DomainEventSubscriberFactoryInterface = container.get(TYPES.DomainEventSubscriberFactory)
  subscriberFactory.create().start()

  // Periodic heartbeat so logs show the worker process is still alive.
  setInterval(() => logger.info('Alive and kicking!'), 20 * 60 * 1000)
})()

View file

@ -0,0 +1,27 @@
#!/bin/sh

# Container entrypoint for the files server image.
# The first argument selects which process to run; any remaining arguments
# are exec'd after the selected yarn command returns.
set -e

COMMAND=$1 && shift 1

case "$COMMAND" in
  'start-local')
    echo "Starting Web in Local Mode..."
    yarn workspace @standardnotes/files-server start:local
    ;;

  'start-web' )
    echo "Starting Web..."
    yarn workspace @standardnotes/files-server start
    ;;

  'start-worker' )
    echo "Starting Worker..."
    yarn workspace @standardnotes/files-server worker
    ;;

  # NOTE(review): an unknown command only prints a message and still falls
  # through to `exec "$@"` below — it does not exit non-zero; confirm intended.
   * )
    echo "Unknown command"
    ;;
esac

exec "$@"

View file

@ -0,0 +1,19 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires
const base = require('../../jest.config');
module.exports = {
...base,
globals: {
'ts-jest': {
tsconfig: 'tsconfig.json',
},
},
coveragePathIgnorePatterns: [
'/Bootstrap/',
'HealthCheckController',
"/Infra/FS"
],
setupFilesAfterEnv: [
'./test-setup.ts'
]
};

View file

@ -0,0 +1,4 @@
{
  // Lint-specific project file referenced by .eslintrc's parserOptions.
  // Same compiler settings as the build, but leaves compiled output and the
  // jest setup file out of the linted program.
  "extends": "./tsconfig.json",
  "exclude": ["dist", "test-setup.ts"]
}

View file

@ -0,0 +1,73 @@
{
  "name": "@standardnotes/files-server",
  "version": "1.0.0",
  "engines": {
    "node": ">=16.0.0 <17.0.0"
  },
  "description": "Standard Notes Files Server",
  "main": "dist/src/index.js",
  "typings": "dist/src/index.d.ts",
  "repository": "git@github.com:standardnotes/files.git",
  "authors": [
    "Karol Sójko <karol@standardnotes.com>"
  ],
  "license": "AGPL-3.0-or-later",
  "scripts": {
    "clean": "rm -fr dist",
    "prebuild": "yarn clean",
    "build": "tsc --rootDir ./",
    "lint": "eslint . --ext .ts",
    "pretest": "yarn lint && yarn build",
    "test": "jest --coverage --config=./jest.config.js --maxWorkers=50%",
    "start": "yarn node dist/bin/server.js",
    "worker": "yarn node dist/bin/worker.js"
  },
  "dependencies": {
    "@newrelic/native-metrics": "7.0.2",
    "@sentry/node": "^6.16.1",
    "@standardnotes/auth": "^3.18.9",
    "@standardnotes/common": "^1.19.4",
    "@standardnotes/domain-events": "^2.27.6",
    "@standardnotes/domain-events-infra": "^1.4.93",
    "@standardnotes/sncrypto-common": "^1.3.0",
    "@standardnotes/sncrypto-node": "^1.3.0",
    "@standardnotes/time": "^1.4.5",
    "aws-sdk": "^2.1158.0",
    "connect-busboy": "^1.0.0",
    "cors": "^2.8.5",
    "dayjs": "^1.11.3",
    "dotenv": "^8.2.0",
    "express": "^4.17.1",
    "express-winston": "^4.0.5",
    "helmet": "^4.3.1",
    "inversify": "^6.0.1",
    "inversify-express-utils": "^6.4.3",
    "ioredis": "^5.0.6",
    "jsonwebtoken": "^8.5.1",
    "newrelic": "^7.3.1",
    "prettyjson": "^1.2.1",
    "reflect-metadata": "^0.1.13",
    "ts-node": "^10.4.0",
    "winston": "^3.3.3"
  },
  "devDependencies": {
    "@standardnotes/config": "2.0.1",
    "@types/connect-busboy": "^1.0.0",
    "@types/cors": "^2.8.9",
    "@types/express": "^4.17.11",
    "@types/ioredis": "^4.28.10",
    "@types/jest": "^28.1.3",
    "@types/jsonwebtoken": "^8.5.0",
    "@types/newrelic": "^7.0.1",
    "@types/prettyjson": "^0.0.29",
    "@types/uuid": "^8.3.0",
    "@typescript-eslint/eslint-plugin": "^5.29.0",
    "eslint": "^8.14.0",
    "eslint-plugin-prettier": "^4.0.0",
    "jest": "^28.1.1",
    "nodemon": "^2.0.16",
    "ts-jest": "^28.0.1",
    "uuid": "^8.3.2"
  }
}

View file

@ -0,0 +1,226 @@
import * as winston from 'winston'
import Redis from 'ioredis'
import * as AWS from 'aws-sdk'
import { Container } from 'inversify'
import { Env } from './Env'
import TYPES from './Types'
import { UploadFileChunk } from '../Domain/UseCase/UploadFileChunk/UploadFileChunk'
import { ValetTokenAuthMiddleware } from '../Controller/ValetTokenAuthMiddleware'
import { TokenDecoder, TokenDecoderInterface, ValetTokenData } from '@standardnotes/auth'
import { Timer, TimerInterface } from '@standardnotes/time'
import { DomainEventFactoryInterface } from '../Domain/Event/DomainEventFactoryInterface'
import { DomainEventFactory } from '../Domain/Event/DomainEventFactory'
import {
RedisDomainEventPublisher,
RedisDomainEventSubscriberFactory,
RedisEventMessageHandler,
SNSDomainEventPublisher,
SQSDomainEventSubscriberFactory,
SQSEventMessageHandler,
SQSNewRelicEventMessageHandler,
} from '@standardnotes/domain-events-infra'
import { StreamDownloadFile } from '../Domain/UseCase/StreamDownloadFile/StreamDownloadFile'
import { FileDownloaderInterface } from '../Domain/Services/FileDownloaderInterface'
import { S3FileDownloader } from '../Infra/S3/S3FileDownloader'
import { FileUploaderInterface } from '../Domain/Services/FileUploaderInterface'
import { S3FileUploader } from '../Infra/S3/S3FileUploader'
import { FSFileDownloader } from '../Infra/FS/FSFileDownloader'
import { FSFileUploader } from '../Infra/FS/FSFileUploader'
import { CreateUploadSession } from '../Domain/UseCase/CreateUploadSession/CreateUploadSession'
import { FinishUploadSession } from '../Domain/UseCase/FinishUploadSession/FinishUploadSession'
import { UploadRepositoryInterface } from '../Domain/Upload/UploadRepositoryInterface'
import { RedisUploadRepository } from '../Infra/Redis/RedisUploadRepository'
import { GetFileMetadata } from '../Domain/UseCase/GetFileMetadata/GetFileMetadata'
import { FileRemoverInterface } from '../Domain/Services/FileRemoverInterface'
import { S3FileRemover } from '../Infra/S3/S3FileRemover'
import { FSFileRemover } from '../Infra/FS/FSFileRemover'
import { RemoveFile } from '../Domain/UseCase/RemoveFile/RemoveFile'
import {
DomainEventHandlerInterface,
DomainEventMessageHandlerInterface,
DomainEventSubscriberFactoryInterface,
} from '@standardnotes/domain-events'
import { MarkFilesToBeRemoved } from '../Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
import { AccountDeletionRequestedEventHandler } from '../Domain/Handler/AccountDeletionRequestedEventHandler'
import { SharedSubscriptionInvitationCanceledEventHandler } from '../Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler'
/**
 * Composes the Inversify container for the files server: environment values,
 * file storage adapters (S3 or local filesystem), use cases, middleware and
 * domain-event plumbing (SNS/SQS or Redis), all selected by env vars.
 */
export class ContainerConfigLoader {
  /**
   * Builds and returns the fully wired container.
   *
   * Adapter selection is environment-driven: S3/SNS/SQS bindings are used
   * when the corresponding variables are set, otherwise filesystem- and
   * Redis-based fallbacks are bound.
   */
  async load(): Promise<Container> {
    const env: Env = new Env()
    env.load()

    const container = new Container()

    const logger = this.createLogger({ env })
    container.bind<winston.Logger>(TYPES.Logger).toConstantValue(logger)

    // env vars — values bound with `true` are optional and may be undefined.
    container.bind(TYPES.S3_BUCKET_NAME).toConstantValue(env.get('S3_BUCKET_NAME', true))
    container.bind(TYPES.S3_AWS_REGION).toConstantValue(env.get('S3_AWS_REGION', true))
    container.bind(TYPES.VALET_TOKEN_SECRET).toConstantValue(env.get('VALET_TOKEN_SECRET'))
    container.bind(TYPES.SNS_TOPIC_ARN).toConstantValue(env.get('SNS_TOPIC_ARN', true))
    container.bind(TYPES.SNS_AWS_REGION).toConstantValue(env.get('SNS_AWS_REGION', true))
    container.bind(TYPES.REDIS_URL).toConstantValue(env.get('REDIS_URL'))
    container.bind(TYPES.REDIS_EVENTS_CHANNEL).toConstantValue(env.get('REDIS_EVENTS_CHANNEL'))
    // Unary plus: MAX_CHUNK_BYTES arrives as a string and is bound as a number.
    container.bind(TYPES.MAX_CHUNK_BYTES).toConstantValue(+env.get('MAX_CHUNK_BYTES'))
    container.bind(TYPES.VERSION).toConstantValue(env.get('VERSION'))
    container.bind(TYPES.SQS_QUEUE_URL).toConstantValue(env.get('SQS_QUEUE_URL', true))
    // Default upload directory sits next to the compiled output when
    // FILE_UPLOAD_PATH is not configured.
    container
      .bind(TYPES.FILE_UPLOAD_PATH)
      .toConstantValue(env.get('FILE_UPLOAD_PATH', true) ?? `${__dirname}/../../uploads`)

    // A comma-separated REDIS_URL is treated as a list of cluster nodes.
    const redisUrl = container.get(TYPES.REDIS_URL) as string
    const isRedisInClusterMode = redisUrl.indexOf(',') > 0
    let redis
    if (isRedisInClusterMode) {
      redis = new Redis.Cluster(redisUrl.split(','))
    } else {
      redis = new Redis(redisUrl)
    }

    container.bind(TYPES.Redis).toConstantValue(redis)

    // When an access key is present, read AWS credentials from AWS_* env vars.
    if (env.get('AWS_ACCESS_KEY_ID', true)) {
      AWS.config.credentials = new AWS.EnvironmentCredentials('AWS')
    }

    // File storage: S3 when a region or custom endpoint is configured,
    // local filesystem adapters otherwise.
    if (env.get('S3_AWS_REGION', true) || env.get('S3_ENDPOINT', true)) {
      const s3Opts: AWS.S3.Types.ClientConfiguration = {
        apiVersion: 'latest',
      }
      if (env.get('S3_AWS_REGION', true)) {
        s3Opts.region = env.get('S3_AWS_REGION', true)
      }
      if (env.get('S3_ENDPOINT', true)) {
        s3Opts.endpoint = new AWS.Endpoint(env.get('S3_ENDPOINT', true))
      }
      const s3Client = new AWS.S3(s3Opts)
      container.bind<AWS.S3>(TYPES.S3).toConstantValue(s3Client)
      container.bind<FileDownloaderInterface>(TYPES.FileDownloader).to(S3FileDownloader)
      container.bind<FileUploaderInterface>(TYPES.FileUploader).to(S3FileUploader)
      container.bind<FileRemoverInterface>(TYPES.FileRemover).to(S3FileRemover)
    } else {
      container.bind<FileDownloaderInterface>(TYPES.FileDownloader).to(FSFileDownloader)
      container
        .bind<FileUploaderInterface>(TYPES.FileUploader)
        .toConstantValue(new FSFileUploader(container.get(TYPES.FILE_UPLOAD_PATH), container.get(TYPES.Logger)))
      container.bind<FileRemoverInterface>(TYPES.FileRemover).to(FSFileRemover)
    }

    if (env.get('SNS_AWS_REGION', true)) {
      container.bind<AWS.SNS>(TYPES.SNS).toConstantValue(
        new AWS.SNS({
          apiVersion: 'latest',
          region: env.get('SNS_AWS_REGION', true),
        }),
      )
    }

    if (env.get('SQS_QUEUE_URL', true)) {
      const sqsConfig: AWS.SQS.Types.ClientConfiguration = {
        apiVersion: 'latest',
        region: env.get('SQS_AWS_REGION', true),
      }
      // Dedicated SQS credentials (if provided) override the default chain.
      if (env.get('SQS_ACCESS_KEY_ID', true) && env.get('SQS_SECRET_ACCESS_KEY', true)) {
        sqsConfig.credentials = {
          accessKeyId: env.get('SQS_ACCESS_KEY_ID', true),
          secretAccessKey: env.get('SQS_SECRET_ACCESS_KEY', true),
        }
      }
      container.bind<AWS.SQS>(TYPES.SQS).toConstantValue(new AWS.SQS(sqsConfig))
    }

    // use cases
    container.bind<UploadFileChunk>(TYPES.UploadFileChunk).to(UploadFileChunk)
    container.bind<StreamDownloadFile>(TYPES.StreamDownloadFile).to(StreamDownloadFile)
    container.bind<CreateUploadSession>(TYPES.CreateUploadSession).to(CreateUploadSession)
    container.bind<FinishUploadSession>(TYPES.FinishUploadSession).to(FinishUploadSession)
    container.bind<GetFileMetadata>(TYPES.GetFileMetadata).to(GetFileMetadata)
    container.bind<RemoveFile>(TYPES.RemoveFile).to(RemoveFile)
    container.bind<MarkFilesToBeRemoved>(TYPES.MarkFilesToBeRemoved).to(MarkFilesToBeRemoved)

    // middleware
    container.bind<ValetTokenAuthMiddleware>(TYPES.ValetTokenAuthMiddleware).to(ValetTokenAuthMiddleware)

    // services
    container
      .bind<TokenDecoderInterface<ValetTokenData>>(TYPES.ValetTokenDecoder)
      .toConstantValue(new TokenDecoder<ValetTokenData>(container.get(TYPES.VALET_TOKEN_SECRET)))
    container.bind<TimerInterface>(TYPES.Timer).toConstantValue(new Timer())
    container.bind<DomainEventFactoryInterface>(TYPES.DomainEventFactory).to(DomainEventFactory)

    // repositories
    container.bind<UploadRepositoryInterface>(TYPES.UploadRepository).to(RedisUploadRepository)

    // Event publishing: SNS when a topic is configured, Redis pub/sub otherwise.
    if (env.get('SNS_TOPIC_ARN', true)) {
      container
        .bind<SNSDomainEventPublisher>(TYPES.DomainEventPublisher)
        .toConstantValue(new SNSDomainEventPublisher(container.get(TYPES.SNS), container.get(TYPES.SNS_TOPIC_ARN)))
    } else {
      container
        .bind<RedisDomainEventPublisher>(TYPES.DomainEventPublisher)
        .toConstantValue(
          new RedisDomainEventPublisher(container.get(TYPES.Redis), container.get(TYPES.REDIS_EVENTS_CHANNEL)),
        )
    }

    // Handlers
    container
      .bind<AccountDeletionRequestedEventHandler>(TYPES.AccountDeletionRequestedEventHandler)
      .to(AccountDeletionRequestedEventHandler)
    container
      .bind<SharedSubscriptionInvitationCanceledEventHandler>(TYPES.SharedSubscriptionInvitationCanceledEventHandler)
      .to(SharedSubscriptionInvitationCanceledEventHandler)

    // Maps event type names to their handlers for the subscriber below.
    const eventHandlers: Map<string, DomainEventHandlerInterface> = new Map([
      ['ACCOUNT_DELETION_REQUESTED', container.get(TYPES.AccountDeletionRequestedEventHandler)],
      [
        'SHARED_SUBSCRIPTION_INVITATION_CANCELED',
        container.get(TYPES.SharedSubscriptionInvitationCanceledEventHandler),
      ],
    ])

    // Event subscription: SQS when a queue is configured (New Relic-wrapped
    // handler when instrumentation is enabled), Redis otherwise.
    if (env.get('SQS_QUEUE_URL', true)) {
      container
        .bind<DomainEventMessageHandlerInterface>(TYPES.DomainEventMessageHandler)
        .toConstantValue(
          env.get('NEW_RELIC_ENABLED', true) === 'true'
            ? new SQSNewRelicEventMessageHandler(eventHandlers, container.get(TYPES.Logger))
            : new SQSEventMessageHandler(eventHandlers, container.get(TYPES.Logger)),
        )
      container
        .bind<DomainEventSubscriberFactoryInterface>(TYPES.DomainEventSubscriberFactory)
        .toConstantValue(
          new SQSDomainEventSubscriberFactory(
            container.get(TYPES.SQS),
            container.get(TYPES.SQS_QUEUE_URL),
            container.get(TYPES.DomainEventMessageHandler),
          ),
        )
    } else {
      container
        .bind<DomainEventMessageHandlerInterface>(TYPES.DomainEventMessageHandler)
        .toConstantValue(new RedisEventMessageHandler(eventHandlers, container.get(TYPES.Logger)))
      container
        .bind<DomainEventSubscriberFactoryInterface>(TYPES.DomainEventSubscriberFactory)
        .toConstantValue(
          new RedisDomainEventSubscriberFactory(
            container.get(TYPES.Redis),
            container.get(TYPES.DomainEventMessageHandler),
            container.get(TYPES.REDIS_EVENTS_CHANNEL),
          ),
        )
    }

    return container
  }

  /**
   * Creates the JSON console logger.
   * NOTE(review): Env.get('LOG_LEVEL') is non-optional and throws when the
   * variable is unset (see Env.get), so the `|| 'info'` fallback only applies
   * to an empty value — confirm whether LOG_LEVEL should be optional here.
   */
  createLogger({ env }: { env: Env }): winston.Logger {
    return winston.createLogger({
      level: env.get('LOG_LEVEL') || 'info',
      format: winston.format.combine(winston.format.splat(), winston.format.json()),
      transports: [new winston.transports.Console({ level: env.get('LOG_LEVEL') || 'info' })],
    })
  }
}

View file

@ -0,0 +1,24 @@
import { config, DotenvParseOutput } from 'dotenv'
import { injectable } from 'inversify'
@injectable()
export class Env {
  // Parsed contents of the .env file; also serves as the "already loaded" flag.
  private env?: DotenvParseOutput

  /**
   * Reads the .env file (if present) through dotenv, which populates
   * process.env as a side effect.
   */
  public load(): void {
    const output = config()
    // Fix: when no .env file exists, `output.parsed` is undefined, which left
    // this.env unset and caused get() to re-run config() on every call.
    // Fall back to an empty object so loading happens at most once.
    this.env = output.parsed ?? {}
  }

  /**
   * Returns the value of an environment variable from process.env.
   *
   * @param key variable name
   * @param optional when false (the default), a missing/empty variable throws
   * @throws Error when the variable is unset and not optional
   */
  public get(key: string, optional = false): string {
    if (!this.env) {
      this.load()
    }

    if (!process.env[key] && !optional) {
      throw new Error(`Environment variable ${key} not set`)
    }

    return <string>process.env[key]
  }
}

View file

@ -0,0 +1,58 @@
/**
 * Dependency-injection identifiers for the files server container.
 * Each symbol corresponds to a binding registered in Bootstrap/Container.
 */
const TYPES = {
  Logger: Symbol.for('Logger'),
  // NOTE(review): no binding for HTTPClient is visible in Container.load() —
  // possibly unused; confirm before relying on it.
  HTTPClient: Symbol.for('HTTPClient'),
  Redis: Symbol.for('Redis'),
  S3: Symbol.for('S3'),
  SNS: Symbol.for('SNS'),
  SQS: Symbol.for('SQS'),
  // use cases
  UploadFileChunk: Symbol.for('UploadFileChunk'),
  StreamDownloadFile: Symbol.for('StreamDownloadFile'),
  CreateUploadSession: Symbol.for('CreateUploadSession'),
  FinishUploadSession: Symbol.for('FinishUploadSession'),
  GetFileMetadata: Symbol.for('GetFileMetadata'),
  RemoveFile: Symbol.for('RemoveFile'),
  MarkFilesToBeRemoved: Symbol.for('MarkFilesToBeRemoved'),
  // services
  ValetTokenDecoder: Symbol.for('ValetTokenDecoder'),
  Timer: Symbol.for('Timer'),
  DomainEventFactory: Symbol.for('DomainEventFactory'),
  DomainEventPublisher: Symbol.for('DomainEventPublisher'),
  FileUploader: Symbol.for('FileUploader'),
  FileDownloader: Symbol.for('FileDownloader'),
  FileRemover: Symbol.for('FileRemover'),
  // repositories
  UploadRepository: Symbol.for('UploadRepository'),
  // middleware
  ValetTokenAuthMiddleware: Symbol.for('ValetTokenAuthMiddleware'),
  // env vars
  AWS_ACCESS_KEY_ID: Symbol.for('AWS_ACCESS_KEY_ID'),
  AWS_SECRET_ACCESS_KEY: Symbol.for('AWS_SECRET_ACCESS_KEY'),
  S3_ENDPOINT: Symbol.for('S3_ENDPOINT'),
  S3_BUCKET_NAME: Symbol.for('S3_BUCKET_NAME'),
  S3_AWS_REGION: Symbol.for('S3_AWS_REGION'),
  SNS_TOPIC_ARN: Symbol.for('SNS_TOPIC_ARN'),
  SNS_AWS_REGION: Symbol.for('SNS_AWS_REGION'),
  SQS_QUEUE_URL: Symbol.for('SQS_QUEUE_URL'),
  SQS_AWS_REGION: Symbol.for('SQS_AWS_REGION'),
  VALET_TOKEN_SECRET: Symbol.for('VALET_TOKEN_SECRET'),
  REDIS_URL: Symbol.for('REDIS_URL'),
  REDIS_EVENTS_CHANNEL: Symbol.for('REDIS_EVENTS_CHANNEL'),
  MAX_CHUNK_BYTES: Symbol.for('MAX_CHUNK_BYTES'),
  VERSION: Symbol.for('VERSION'),
  NEW_RELIC_ENABLED: Symbol.for('NEW_RELIC_ENABLED'),
  FILE_UPLOAD_PATH: Symbol.for('FILE_UPLOAD_PATH'),
  // Handlers
  DomainEventMessageHandler: Symbol.for('DomainEventMessageHandler'),
  DomainEventSubscriberFactory: Symbol.for('DomainEventSubscriberFactory'),
  AccountDeletionRequestedEventHandler: Symbol.for('AccountDeletionRequestedEventHandler'),
  SharedSubscriptionInvitationCanceledEventHandler: Symbol.for('SharedSubscriptionInvitationCanceledEventHandler'),
}

export default TYPES

View file

@ -0,0 +1,259 @@
import 'reflect-metadata'
import { CreateUploadSession } from '../Domain/UseCase/CreateUploadSession/CreateUploadSession'
import { FinishUploadSession } from '../Domain/UseCase/FinishUploadSession/FinishUploadSession'
import { StreamDownloadFile } from '../Domain/UseCase/StreamDownloadFile/StreamDownloadFile'
import { UploadFileChunk } from '../Domain/UseCase/UploadFileChunk/UploadFileChunk'
import { Request, Response } from 'express'
import { Writable, Readable } from 'stream'
import { FilesController } from './FilesController'
import { GetFileMetadata } from '../Domain/UseCase/GetFileMetadata/GetFileMetadata'
import { results } from 'inversify-express-utils'
import { RemoveFile } from '../Domain/UseCase/RemoveFile/RemoveFile'
describe('FilesController', () => {
let uploadFileChunk: UploadFileChunk
let createUploadSession: CreateUploadSession
let finishUploadSession: FinishUploadSession
let streamDownloadFile: StreamDownloadFile
let getFileMetadata: GetFileMetadata
let removeFile: RemoveFile
let request: Request
let response: Response
let readStream: Readable
const maxChunkBytes = 100_000
const createController = () =>
new FilesController(
uploadFileChunk,
createUploadSession,
finishUploadSession,
streamDownloadFile,
getFileMetadata,
removeFile,
maxChunkBytes,
)
beforeEach(() => {
readStream = {} as jest.Mocked<Readable>
readStream.pipe = jest.fn().mockReturnValue(new Writable())
streamDownloadFile = {} as jest.Mocked<StreamDownloadFile>
streamDownloadFile.execute = jest.fn().mockReturnValue({ success: true, readStream })
uploadFileChunk = {} as jest.Mocked<UploadFileChunk>
uploadFileChunk.execute = jest.fn().mockReturnValue({ success: true })
createUploadSession = {} as jest.Mocked<CreateUploadSession>
createUploadSession.execute = jest.fn().mockReturnValue({ success: true, uploadId: '123' })
finishUploadSession = {} as jest.Mocked<FinishUploadSession>
finishUploadSession.execute = jest.fn().mockReturnValue({ success: true })
getFileMetadata = {} as jest.Mocked<GetFileMetadata>
getFileMetadata.execute = jest.fn().mockReturnValue({ success: true, size: 555_555 })
removeFile = {} as jest.Mocked<RemoveFile>
removeFile.execute = jest.fn().mockReturnValue({ success: true })
request = {
body: {},
headers: {},
} as jest.Mocked<Request>
response = {
locals: {},
} as jest.Mocked<Response>
response.locals.userUuid = '1-2-3'
response.locals.permittedResources = [
{
remoteIdentifier: '2-3-4',
unencryptedFileSize: 123,
},
]
response.writeHead = jest.fn()
})
// --- download(): happy paths ---
it('should return a writable stream upon file download', async () => {
  request.headers['range'] = 'bytes=0-'
  const result = (await createController().download(request, response)) as () => Writable
  // Partial-content head advertising the first default-sized chunk of the 555555-byte file.
  expect(response.writeHead).toHaveBeenCalledWith(206, {
    'Accept-Ranges': 'bytes',
    'Content-Length': 100000,
    'Content-Range': 'bytes 0-99999/555555',
    'Content-Type': 'application/octet-stream',
  })
  expect(result()).toBeInstanceOf(Writable)
})
it('should return proper byte range on consecutive calls', async () => {
  request.headers['range'] = 'bytes=0-'
  ;(await createController().download(request, response)) as () => Writable
  request.headers['range'] = 'bytes=100000-'
  ;(await createController().download(request, response)) as () => Writable
  expect(response.writeHead).toHaveBeenNthCalledWith(1, 206, {
    'Accept-Ranges': 'bytes',
    'Content-Length': 100000,
    'Content-Range': 'bytes 0-99999/555555',
    'Content-Type': 'application/octet-stream',
  })
  // Second request resumes exactly where the first chunk ended.
  expect(response.writeHead).toHaveBeenNthCalledWith(2, 206, {
    'Accept-Ranges': 'bytes',
    'Content-Length': 100000,
    'Content-Range': 'bytes 100000-199999/555555',
    'Content-Type': 'application/octet-stream',
  })
})
it('should return a writable stream with custom chunk size', async () => {
  request.headers['x-chunk-size'] = '50000'
  request.headers['range'] = 'bytes=0-'
  const result = (await createController().download(request, response)) as () => Writable
  expect(response.writeHead).toHaveBeenCalledWith(206, {
    'Accept-Ranges': 'bytes',
    'Content-Length': 50000,
    'Content-Range': 'bytes 0-49999/555555',
    'Content-Type': 'application/octet-stream',
  })
  expect(result()).toBeInstanceOf(Writable)
})
it('should default to maximum chunk size if custom chunk size is too large', async () => {
  request.headers['x-chunk-size'] = '200000'
  request.headers['range'] = 'bytes=0-'
  const result = (await createController().download(request, response)) as () => Writable
  // Requested 200000 but the controller caps at the injected maximum (100000 here).
  expect(response.writeHead).toHaveBeenCalledWith(206, {
    'Accept-Ranges': 'bytes',
    'Content-Length': 100000,
    'Content-Range': 'bytes 0-99999/555555',
    'Content-Type': 'application/octet-stream',
  })
  expect(result()).toBeInstanceOf(Writable)
})
// --- download(): failure paths ---
it('should not return a writable stream if bytes range is not provided', async () => {
  const httpResponse = await createController().download(request, response)
  expect(httpResponse).toBeInstanceOf(results.BadRequestErrorMessageResult)
})
it('should not return a writable stream if getting file metadata fails', async () => {
  request.headers['range'] = 'bytes=0-'
  getFileMetadata.execute = jest.fn().mockReturnValue({ success: false, message: 'error' })
  const httpResponse = await createController().download(request, response)
  expect(httpResponse).toBeInstanceOf(results.BadRequestErrorMessageResult)
})
it('should not return a writable stream if creating download stream fails', async () => {
  request.headers['range'] = 'bytes=0-'
  streamDownloadFile.execute = jest.fn().mockReturnValue({ success: false, message: 'error' })
  const httpResponse = await createController().download(request, response)
  expect(httpResponse).toBeInstanceOf(results.BadRequestErrorMessageResult)
})
// --- startUpload() ---
it('should create an upload session', async () => {
  await createController().startUpload(request, response)
  expect(createUploadSession.execute).toHaveBeenCalledWith({
    resourceRemoteIdentifier: '2-3-4',
    userUuid: '1-2-3',
  })
})
it('should return bad request if upload session could not be created', async () => {
  createUploadSession.execute = jest.fn().mockReturnValue({ success: false })
  const httpResponse = await createController().startUpload(request, response)
  const result = await httpResponse.executeAsync()
  expect(result.statusCode).toEqual(400)
})
// --- finishUpload() ---
it('should finish an upload session', async () => {
  await createController().finishUpload(request, response)
  expect(finishUploadSession.execute).toHaveBeenCalledWith({
    resourceRemoteIdentifier: '2-3-4',
    userUuid: '1-2-3',
  })
})
it('should return bad request if upload session could not be finished', async () => {
  finishUploadSession.execute = jest.fn().mockReturnValue({ success: false })
  const httpResponse = await createController().finishUpload(request, response)
  const result = await httpResponse.executeAsync()
  expect(result.statusCode).toEqual(400)
})
// --- remove() ---
it('should remove a file', async () => {
  await createController().remove(request, response)
  expect(removeFile.execute).toHaveBeenCalledWith({
    resourceRemoteIdentifier: '2-3-4',
    userUuid: '1-2-3',
  })
})
it('should return bad request if file removal could not be completed', async () => {
  removeFile.execute = jest.fn().mockReturnValue({ success: false })
  const httpResponse = await createController().remove(request, response)
  const result = await httpResponse.executeAsync()
  expect(result.statusCode).toEqual(400)
})
// --- uploadChunk() ---
it('should upload a chunk to an upload session', async () => {
  request.headers['x-chunk-id'] = '2'
  request.body = Buffer.from([123])
  await createController().uploadChunk(request, response)
  expect(uploadFileChunk.execute).toHaveBeenCalledWith({
    chunkId: 2,
    data: Buffer.from([123]),
    resourceRemoteIdentifier: '2-3-4',
    userUuid: '1-2-3',
  })
})
it('should return bad request if chunk could not be uploaded', async () => {
  request.headers['x-chunk-id'] = '2'
  request.body = Buffer.from([123])
  uploadFileChunk.execute = jest.fn().mockReturnValue({ success: false })
  const httpResponse = await createController().uploadChunk(request, response)
  const result = await httpResponse.executeAsync()
  expect(result.statusCode).toEqual(400)
})
it('should return bad request if chunk id is missing', async () => {
  request.body = Buffer.from([123])
  const httpResponse = await createController().uploadChunk(request, response)
  const result = await httpResponse.executeAsync()
  expect(result.statusCode).toEqual(400)
})
})

View file

@ -0,0 +1,154 @@
import { BaseHttpController, controller, httpDelete, httpGet, httpPost, results } from 'inversify-express-utils'
import { Request, Response } from 'express'
import { inject } from 'inversify'
import { Writable } from 'stream'
import TYPES from '../Bootstrap/Types'
import { UploadFileChunk } from '../Domain/UseCase/UploadFileChunk/UploadFileChunk'
import { StreamDownloadFile } from '../Domain/UseCase/StreamDownloadFile/StreamDownloadFile'
import { CreateUploadSession } from '../Domain/UseCase/CreateUploadSession/CreateUploadSession'
import { FinishUploadSession } from '../Domain/UseCase/FinishUploadSession/FinishUploadSession'
import { GetFileMetadata } from '../Domain/UseCase/GetFileMetadata/GetFileMetadata'
import { RemoveFile } from '../Domain/UseCase/RemoveFile/RemoveFile'
@controller('/v1/files', TYPES.ValetTokenAuthMiddleware)
export class FilesController extends BaseHttpController {
  constructor(
    @inject(TYPES.UploadFileChunk) private uploadFileChunk: UploadFileChunk,
    @inject(TYPES.CreateUploadSession) private createUploadSession: CreateUploadSession,
    @inject(TYPES.FinishUploadSession) private finishUploadSession: FinishUploadSession,
    @inject(TYPES.StreamDownloadFile) private streamDownloadFile: StreamDownloadFile,
    @inject(TYPES.GetFileMetadata) private getFileMetadata: GetFileMetadata,
    @inject(TYPES.RemoveFile) private removeFile: RemoveFile,
    @inject(TYPES.MAX_CHUNK_BYTES) private maxChunkBytes: number,
  ) {
    super()
  }

  /** Opens a multipart upload session for the resource authorized by the valet token. */
  @httpPost('/upload/create-session')
  async startUpload(
    _request: Request,
    response: Response,
  ): Promise<results.BadRequestErrorMessageResult | results.JsonResult> {
    const result = await this.createUploadSession.execute({
      userUuid: response.locals.userUuid,
      resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier,
    })
    if (!result.success) {
      return this.badRequest(result.message)
    }
    return this.json({ success: true, uploadId: result.uploadId })
  }

  /** Uploads one chunk of the file; the chunk ordinal comes from the x-chunk-id header. */
  @httpPost('/upload/chunk')
  async uploadChunk(
    request: Request,
    response: Response,
  ): Promise<results.BadRequestErrorMessageResult | results.JsonResult> {
    // NOTE(review): chunk ids appear to be positive integers (tests use 2);
    // a 0 or non-numeric header is treated as missing — confirm ids are 1-based.
    const chunkId = +(request.headers['x-chunk-id'] as string)
    if (!chunkId) {
      return this.badRequest('Missing x-chunk-id header in request.')
    }
    const result = await this.uploadFileChunk.execute({
      userUuid: response.locals.userUuid,
      resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier,
      chunkId,
      data: request.body,
    })
    if (!result.success) {
      return this.badRequest(result.message)
    }
    return this.json({ success: true, message: 'Chunk uploaded successfully' })
  }

  /** Finalizes a previously opened upload session, passing along the user's quota figures. */
  @httpPost('/upload/close-session')
  public async finishUpload(
    _request: Request,
    response: Response,
  ): Promise<results.BadRequestErrorMessageResult | results.JsonResult> {
    const result = await this.finishUploadSession.execute({
      userUuid: response.locals.userUuid,
      resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier,
      uploadBytesLimit: response.locals.uploadBytesLimit,
      uploadBytesUsed: response.locals.uploadBytesUsed,
    })
    if (!result.success) {
      return this.badRequest(result.message)
    }
    return this.json({ success: true, message: 'File uploaded successfully' })
  }

  /** Removes the file authorized by the valet token. */
  @httpDelete('/')
  async remove(
    _request: Request,
    response: Response,
  ): Promise<results.BadRequestErrorMessageResult | results.JsonResult> {
    const result = await this.removeFile.execute({
      userUuid: response.locals.userUuid,
      resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier,
      regularSubscriptionUuid: response.locals.regularSubscriptionUuid,
    })
    if (!result.success) {
      return this.badRequest(result.message)
    }
    return this.json({ success: true, message: 'File removed successfully' })
  }

  /**
   * Streams one chunk of a file as a 206 partial-content response.
   * The client drives the chunked download via the `Range` header
   * (`bytes=<start>-`) and may request a custom chunk size through
   * `x-chunk-size` (capped at the injected maximum). Returns a thunk
   * producing the piped stream so the framework can defer execution.
   */
  @httpGet('/')
  async download(
    request: Request,
    response: Response,
  ): Promise<results.BadRequestErrorMessageResult | (() => Writable)> {
    const range = request.headers['range']
    if (!range) {
      return this.badRequest('File download requires range header to be set.')
    }
    // Parse the requested start offset. The previous implementation stripped all
    // non-digit characters, which mangled explicit ranges like "bytes=0-499"
    // into 0499; anchor on the "bytes=<start>-" prefix instead.
    const rangeMatch = /^bytes=(\d+)-/.exec(range)
    if (!rangeMatch) {
      return this.badRequest('File download requires range header to be set.')
    }
    let chunkSize = +(request.headers['x-chunk-size'] as string)
    if (!chunkSize || chunkSize > this.maxChunkBytes) {
      chunkSize = this.maxChunkBytes
    }
    const fileMetadata = await this.getFileMetadata.execute({
      userUuid: response.locals.userUuid,
      resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier,
    })
    if (!fileMetadata.success) {
      return this.badRequest(fileMetadata.message)
    }
    const startRange = Number(rangeMatch[1])
    const endRange = Math.min(startRange + chunkSize - 1, fileMetadata.size - 1)
    const result = await this.streamDownloadFile.execute({
      userUuid: response.locals.userUuid,
      resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier,
      startRange,
      endRange,
    })
    if (!result.success) {
      // Headers have not been written yet, so a 400 can still be sent cleanly.
      return this.badRequest(result.message)
    }
    // Write the 206 head only after the stream was created successfully; the
    // previous ordering wrote the head first and then attempted to send a 400
    // body on failure, after headers had already been flushed.
    response.writeHead(206, {
      'Content-Range': `bytes ${startRange}-${endRange}/${fileMetadata.size}`,
      'Accept-Ranges': 'bytes',
      'Content-Length': endRange - startRange + 1,
      'Content-Type': 'application/octet-stream',
    })
    return () => result.readStream.pipe(response)
  }
}

View file

@ -0,0 +1,12 @@
import 'reflect-metadata'
import { HealthCheckController } from './HealthCheckController'
describe('HealthCheckController', () => {
  it('should return OK', async () => {
    // The health endpoint needs no collaborators, so construct it inline.
    const controller = new HealthCheckController()
    const body = (await controller.get()) as string
    expect(body).toEqual('OK')
  })
})

View file

@ -0,0 +1,9 @@
import { controller, httpGet } from 'inversify-express-utils'
@controller('/healthcheck')
export class HealthCheckController {
  // Liveness probe: always responds with a plain 'OK' body.
  @httpGet('/')
  public async get(): Promise<string> {
    const status = 'OK'
    return status
  }
}

View file

@ -0,0 +1,203 @@
import 'reflect-metadata'
import { ValetTokenAuthMiddleware } from './ValetTokenAuthMiddleware'
import { NextFunction, Request, Response } from 'express'
import { Logger } from 'winston'
import { TokenDecoderInterface, ValetTokenData } from '@standardnotes/auth'
describe('ValetTokenAuthMiddleware', () => {
  let tokenDecoder: TokenDecoderInterface<ValetTokenData>
  let request: Request
  let response: Response
  let next: NextFunction
  const logger = {
    debug: jest.fn(),
  } as unknown as jest.Mocked<Logger>
  const createMiddleware = () => new ValetTokenAuthMiddleware(tokenDecoder, logger)
  beforeEach(() => {
    tokenDecoder = {} as jest.Mocked<TokenDecoderInterface<ValetTokenData>>
    // Default token: write operation, 100-byte limit with 80 used (20 left), 30-byte file permitted.
    tokenDecoder.decodeToken = jest.fn().mockReturnValue({
      userUuid: '1-2-3',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 30,
        },
      ],
      permittedOperation: 'write',
      uploadBytesLimit: 100,
      uploadBytesUsed: 80,
    })
    request = {
      headers: {},
      query: {},
      body: {},
    } as jest.Mocked<Request>
    response = {
      locals: {},
    } as jest.Mocked<Response>
    response.status = jest.fn().mockReturnThis()
    response.send = jest.fn()
    next = jest.fn()
  })
  it('should authorize user with a valet token', async () => {
    tokenDecoder.decodeToken = jest.fn().mockReturnValue({
      userUuid: '1-2-3',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 30,
        },
      ],
      permittedOperation: 'write',
      uploadBytesLimit: -1,
      uploadBytesUsed: 80,
    })
    request.headers['x-valet-token'] = 'valet-token'
    await createMiddleware().handler(request, response, next)
    // All decoded token claims are copied onto response.locals for downstream handlers.
    expect(response.locals).toEqual({
      userUuid: '1-2-3',
      permittedOperation: 'write',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 30,
        },
      ],
      uploadBytesLimit: -1,
      uploadBytesUsed: 80,
    })
    expect(next).toHaveBeenCalled()
  })
  it('should authorize user with unlimited upload with a valet token', async () => {
    request.headers['x-valet-token'] = 'valet-token'
    // uploadBytesLimit of -1 denotes unlimited storage.
    tokenDecoder.decodeToken = jest.fn().mockReturnValue({
      userUuid: '1-2-3',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 10,
        },
      ],
      permittedOperation: 'write',
      uploadBytesLimit: -1,
      uploadBytesUsed: 80,
    })
    await createMiddleware().handler(request, response, next)
    expect(response.locals).toEqual({
      userUuid: '1-2-3',
      permittedOperation: 'write',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 10,
        },
      ],
      uploadBytesLimit: -1,
      uploadBytesUsed: 80,
    })
    expect(next).toHaveBeenCalled()
  })
  it('should not authorize user with no space left for upload', async () => {
    request.headers['x-valet-token'] = 'valet-token'
    // A 21-byte file against 20 bytes of remaining quota must yield 403.
    tokenDecoder.decodeToken = jest.fn().mockReturnValue({
      userUuid: '1-2-3',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 21,
        },
      ],
      permittedOperation: 'write',
      uploadBytesLimit: 100,
      uploadBytesUsed: 80,
    })
    await createMiddleware().handler(request, response, next)
    expect(response.status).toHaveBeenCalledWith(403)
    expect(next).not.toHaveBeenCalled()
  })
  it('should authorize user with no space left for upload for download operations', async () => {
    request.headers['x-valet-token'] = 'valet-token'
    // Quota is only enforced for 'write' operations; reads always pass through.
    tokenDecoder.decodeToken = jest.fn().mockReturnValue({
      userUuid: '1-2-3',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 21,
        },
      ],
      permittedOperation: 'read',
      uploadBytesLimit: 100,
      uploadBytesUsed: 80,
    })
    await createMiddleware().handler(request, response, next)
    expect(response.locals).toEqual({
      userUuid: '1-2-3',
      permittedOperation: 'read',
      permittedResources: [
        {
          remoteIdentifier: '1-2-3/2-3-4',
          unencryptedFileSize: 21,
        },
      ],
      uploadBytesLimit: 100,
      uploadBytesUsed: 80,
    })
    expect(next).toHaveBeenCalled()
  })
  it('should not authorize if request is missing valet token in headers', async () => {
    await createMiddleware().handler(request, response, next)
    expect(response.status).toHaveBeenCalledWith(401)
    expect(next).not.toHaveBeenCalled()
  })
  it('should not authorize if auth valet token is malformed', async () => {
    request.headers['x-valet-token'] = 'valet-token'
    tokenDecoder.decodeToken = jest.fn().mockReturnValue(undefined)
    await createMiddleware().handler(request, response, next)
    expect(response.status).toHaveBeenCalledWith(401)
    expect(next).not.toHaveBeenCalled()
  })
  it('should pass the error to next middleware if one occurres', async () => {
    request.headers['x-valet-token'] = 'valet-token'
    const error = new Error('Ooops')
    tokenDecoder.decodeToken = jest.fn().mockImplementation(() => {
      throw error
    })
    await createMiddleware().handler(request, response, next)
    expect(response.status).not.toHaveBeenCalled()
    expect(next).toHaveBeenCalledWith(error)
  })
})

View file

@ -0,0 +1,90 @@
import { TokenDecoderInterface, ValetTokenData } from '@standardnotes/auth'
import { NextFunction, Request, Response } from 'express'
import { inject, injectable } from 'inversify'
import { BaseMiddleware } from 'inversify-express-utils'
import { Logger } from 'winston'
import TYPES from '../Bootstrap/Types'
@injectable()
export class ValetTokenAuthMiddleware extends BaseMiddleware {
  constructor(
    @inject(TYPES.ValetTokenDecoder) private tokenDecoder: TokenDecoderInterface<ValetTokenData>,
    @inject(TYPES.Logger) private logger: Logger,
  ) {
    super()
  }
  /**
   * Authenticates the request with a valet token taken from the `x-valet-token`
   * header (falling back to the request body and query string). On success the
   * decoded claims are copied onto `response.locals` for downstream handlers;
   * otherwise the request is terminated with 401 (missing/undecodable token)
   * or 403 (upload quota exhausted). Unexpected errors are forwarded to `next`.
   */
  async handler(request: Request, response: Response, next: NextFunction): Promise<void> {
    try {
      const valetToken = request.headers['x-valet-token'] || request.body.valetToken || request.query.valetToken
      if (!valetToken) {
        this.logger.debug('ValetTokenAuthMiddleware missing valet token.')
        response.status(401).send({
          error: {
            tag: 'invalid-auth',
            message: 'Invalid valet token.',
          },
        })
        return
      }
      const valetTokenData = this.tokenDecoder.decodeToken(valetToken)
      if (valetTokenData === undefined) {
        this.logger.debug('ValetTokenAuthMiddleware authentication failure.')
        response.status(401).send({
          error: {
            tag: 'invalid-auth',
            message: 'Invalid valet token.',
          },
        })
        return
      }
      if (this.userHasNoSpaceToUpload(valetTokenData)) {
        response.status(403).send({
          error: {
            tag: 'no-space',
            message: 'The file you are trying to upload is too big. Please upgrade your subscription',
          },
        })
        return
      }
      // Expose the token claims to controllers.
      response.locals.userUuid = valetTokenData.userUuid
      response.locals.permittedResources = valetTokenData.permittedResources
      response.locals.permittedOperation = valetTokenData.permittedOperation
      response.locals.uploadBytesUsed = valetTokenData.uploadBytesUsed
      response.locals.uploadBytesLimit = valetTokenData.uploadBytesLimit
      response.locals.regularSubscriptionUuid = valetTokenData.regularSubscriptionUuid
      return next()
    } catch (error) {
      return next(error)
    }
  }
  /**
   * True when a write operation would exceed the user's upload quota.
   * A limit of -1 means unlimited storage; non-write operations never count.
   */
  private userHasNoSpaceToUpload(valetTokenData: ValetTokenData) {
    if (valetTokenData.permittedOperation !== 'write') {
      return false
    }
    if (valetTokenData.uploadBytesLimit === -1) {
      return false
    }
    const remainingUploadSpace = valetTokenData.uploadBytesLimit - valetTokenData.uploadBytesUsed
    let consideredUploadSize = 0
    for (const resource of valetTokenData.permittedResources) {
      // Treat a missing size as 0 instead of force-casting: an undefined value
      // would poison the arithmetic with NaN and silently authorize the upload.
      consideredUploadSize += resource.unencryptedFileSize ?? 0
    }
    return remainingUploadSpace - consideredUploadSize <= 0
  }
}

View file

@ -0,0 +1,72 @@
import 'reflect-metadata'
import { TimerInterface } from '@standardnotes/time'
import { DomainEventFactory } from './DomainEventFactory'
describe('DomainEventFactory', () => {
  let timer: TimerInterface
  const createFactory = () => new DomainEventFactory(timer)
  beforeEach(() => {
    timer = {} as jest.Mocked<TimerInterface>
    // Frozen clock so createdAt is deterministic in the expectations below.
    timer.getUTCDate = jest.fn().mockReturnValue(new Date(1))
  })
  it('should create a FILE_UPLOADED event', () => {
    expect(
      createFactory().createFileUploadedEvent({
        fileByteSize: 123,
        fileName: '2-3-4',
        filePath: '1-2-3/2-3-4',
        userUuid: '1-2-3',
      }),
    ).toEqual({
      createdAt: new Date(1),
      meta: {
        correlation: {
          userIdentifier: '1-2-3',
          userIdentifierType: 'uuid',
        },
        origin: 'files',
      },
      payload: {
        fileByteSize: 123,
        fileName: '2-3-4',
        filePath: '1-2-3/2-3-4',
        userUuid: '1-2-3',
      },
      type: 'FILE_UPLOADED',
    })
  })
  it('should create a FILE_REMOVED event', () => {
    // Same shape as FILE_UPLOADED, plus the subscription the file counted against.
    expect(
      createFactory().createFileRemovedEvent({
        fileByteSize: 123,
        fileName: '2-3-4',
        filePath: '1-2-3/2-3-4',
        userUuid: '1-2-3',
        regularSubscriptionUuid: '1-2-3',
      }),
    ).toEqual({
      createdAt: new Date(1),
      meta: {
        correlation: {
          userIdentifier: '1-2-3',
          userIdentifierType: 'uuid',
        },
        origin: 'files',
      },
      payload: {
        fileByteSize: 123,
        fileName: '2-3-4',
        filePath: '1-2-3/2-3-4',
        userUuid: '1-2-3',
        regularSubscriptionUuid: '1-2-3',
      },
      type: 'FILE_REMOVED',
    })
  })
})

View file

@ -0,0 +1,53 @@
import { Uuid } from '@standardnotes/common'
import { FileUploadedEvent, FileRemovedEvent, DomainEventService } from '@standardnotes/domain-events'
import { TimerInterface } from '@standardnotes/time'
import { inject, injectable } from 'inversify'
import TYPES from '../../Bootstrap/Types'
import { DomainEventFactoryInterface } from './DomainEventFactoryInterface'
@injectable()
export class DomainEventFactory implements DomainEventFactoryInterface {
  constructor(@inject(TYPES.Timer) private timer: TimerInterface) {}

  /** Builds a FILE_UPLOADED event for a file that finished uploading. */
  createFileUploadedEvent(payload: {
    userUuid: string
    filePath: string
    fileName: string
    fileByteSize: number
  }): FileUploadedEvent {
    const createdAt = this.timer.getUTCDate()
    return {
      type: 'FILE_UPLOADED',
      createdAt,
      meta: {
        correlation: { userIdentifier: payload.userUuid, userIdentifierType: 'uuid' },
        origin: DomainEventService.Files,
      },
      payload,
    }
  }

  /** Builds a FILE_REMOVED event for a file that was deleted from storage. */
  createFileRemovedEvent(payload: {
    userUuid: string
    filePath: string
    fileName: string
    fileByteSize: number
    regularSubscriptionUuid: Uuid
  }): FileRemovedEvent {
    const createdAt = this.timer.getUTCDate()
    return {
      type: 'FILE_REMOVED',
      createdAt,
      meta: {
        correlation: { userIdentifier: payload.userUuid, userIdentifierType: 'uuid' },
        origin: DomainEventService.Files,
      },
      payload,
    }
  }
}

View file

@ -0,0 +1,18 @@
import { Uuid } from '@standardnotes/common'
import { FileUploadedEvent, FileRemovedEvent } from '@standardnotes/domain-events'
/** Builds the domain events published by the files service. */
export interface DomainEventFactoryInterface {
  /** Creates a FILE_UPLOADED event describing a successfully uploaded file. */
  createFileUploadedEvent(payload: {
    userUuid: string
    filePath: string
    fileName: string
    fileByteSize: number
  }): FileUploadedEvent
  /** Creates a FILE_REMOVED event describing a removed file and the subscription it was counted against. */
  createFileRemovedEvent(payload: {
    userUuid: string
    filePath: string
    fileName: string
    fileByteSize: number
    regularSubscriptionUuid: Uuid
  }): FileRemovedEvent
}

View file

@ -0,0 +1,8 @@
import { Uuid } from '@standardnotes/common'
// Details of a file marked for removal — carries everything needed to later
// emit a FILE_REMOVED event for it.
export type RemovedFileDescription = {
  userUuid: Uuid
  filePath: string
  fileName: string
  fileByteSize: number
}

View file

@ -0,0 +1,73 @@
import 'reflect-metadata'
import {
AccountDeletionRequestedEvent,
AccountDeletionRequestedEventPayload,
DomainEventPublisherInterface,
FileRemovedEvent,
} from '@standardnotes/domain-events'
import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
import { AccountDeletionRequestedEventHandler } from './AccountDeletionRequestedEventHandler'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { RemovedFileDescription } from '../File/RemovedFileDescription'
describe('AccountDeletionRequestedEventHandler', () => {
  let markFilesToBeRemoved: MarkFilesToBeRemoved
  let event: AccountDeletionRequestedEvent
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  const createHandler = () =>
    new AccountDeletionRequestedEventHandler(markFilesToBeRemoved, domainEventPublisher, domainEventFactory)
  beforeEach(() => {
    markFilesToBeRemoved = {} as jest.Mocked<MarkFilesToBeRemoved>
    // Default: marking succeeds with exactly one removed-file description.
    markFilesToBeRemoved.execute = jest.fn().mockReturnValue({
      success: true,
      filesRemoved: [{} as jest.Mocked<RemovedFileDescription>],
    })
    event = {} as jest.Mocked<AccountDeletionRequestedEvent>
    event.payload = {
      userUuid: '1-2-3',
      regularSubscriptionUuid: '1-2-3',
    } as jest.Mocked<AccountDeletionRequestedEventPayload>
    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()
    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createFileRemovedEvent = jest.fn().mockReturnValue({} as jest.Mocked<FileRemovedEvent>)
  })
  it('should mark files to be remove for user', async () => {
    await createHandler().handle(event)
    expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' })
    expect(domainEventPublisher.publish).toHaveBeenCalled()
  })
  it('should not mark files to be remove for user if user has no regular subscription', async () => {
    event.payload.regularSubscriptionUuid = undefined
    await createHandler().handle(event)
    expect(markFilesToBeRemoved.execute).not.toHaveBeenCalled()
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })
  it('should not publish events if failed to mark files to be removed', async () => {
    markFilesToBeRemoved.execute = jest.fn().mockReturnValue({
      success: false,
    })
    await createHandler().handle(event)
    expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' })
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })
})

View file

@ -0,0 +1,42 @@
import {
AccountDeletionRequestedEvent,
DomainEventHandlerInterface,
DomainEventPublisherInterface,
} from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import TYPES from '../../Bootstrap/Types'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
@injectable()
export class AccountDeletionRequestedEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.MarkFilesToBeRemoved) private markFilesToBeRemoved: MarkFilesToBeRemoved,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
  ) {}
  /**
   * Marks every file of the deleted account for removal and publishes one
   * FILE_REMOVED event per removed file. Accounts without a regular
   * subscription are skipped entirely.
   */
  async handle(event: AccountDeletionRequestedEvent): Promise<void> {
    const regularSubscriptionUuid = event.payload.regularSubscriptionUuid
    if (regularSubscriptionUuid === undefined) {
      return
    }
    const markingResult = await this.markFilesToBeRemoved.execute({
      userUuid: event.payload.userUuid,
    })
    if (!markingResult.success) {
      return
    }
    for (const removedFile of markingResult.filesRemoved) {
      const fileRemovedEvent = this.domainEventFactory.createFileRemovedEvent({
        regularSubscriptionUuid,
        ...removedFile,
      })
      await this.domainEventPublisher.publish(fileRemovedEvent)
    }
  }
}

View file

@ -0,0 +1,73 @@
import 'reflect-metadata'
import {
SharedSubscriptionInvitationCanceledEvent,
SharedSubscriptionInvitationCanceledEventPayload,
DomainEventPublisherInterface,
FileRemovedEvent,
} from '@standardnotes/domain-events'
import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
import { SharedSubscriptionInvitationCanceledEventHandler } from './SharedSubscriptionInvitationCanceledEventHandler'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { RemovedFileDescription } from '../File/RemovedFileDescription'
describe('SharedSubscriptionInvitationCanceledEventHandler', () => {
  let markFilesToBeRemoved: MarkFilesToBeRemoved
  let event: SharedSubscriptionInvitationCanceledEvent
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  const createHandler = () =>
    new SharedSubscriptionInvitationCanceledEventHandler(markFilesToBeRemoved, domainEventPublisher, domainEventFactory)
  beforeEach(() => {
    markFilesToBeRemoved = {} as jest.Mocked<MarkFilesToBeRemoved>
    // Default: marking succeeds with exactly one removed-file description.
    markFilesToBeRemoved.execute = jest.fn().mockReturnValue({
      success: true,
      filesRemoved: [{} as jest.Mocked<RemovedFileDescription>],
    })
    event = {} as jest.Mocked<SharedSubscriptionInvitationCanceledEvent>
    event.payload = {
      inviteeIdentifier: '1-2-3',
      inviteeIdentifierType: 'uuid',
    } as jest.Mocked<SharedSubscriptionInvitationCanceledEventPayload>
    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()
    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createFileRemovedEvent = jest.fn().mockReturnValue({} as jest.Mocked<FileRemovedEvent>)
  })
  it('should mark files to be remove for user', async () => {
    await createHandler().handle(event)
    expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' })
    expect(domainEventPublisher.publish).toHaveBeenCalled()
  })
  it('should not mark files to be remove for user if identifier is not of uuid type', async () => {
    event.payload.inviteeIdentifierType = 'email'
    await createHandler().handle(event)
    expect(markFilesToBeRemoved.execute).not.toHaveBeenCalled()
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })
  it('should not publish events if failed to mark files to be removed', async () => {
    markFilesToBeRemoved.execute = jest.fn().mockReturnValue({
      success: false,
    })
    await createHandler().handle(event)
    expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' })
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })
})

View file

@ -0,0 +1,42 @@
import {
SharedSubscriptionInvitationCanceledEvent,
DomainEventHandlerInterface,
DomainEventPublisherInterface,
} from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import TYPES from '../../Bootstrap/Types'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
@injectable()
export class SharedSubscriptionInvitationCanceledEventHandler implements DomainEventHandlerInterface {
  constructor(
    @inject(TYPES.MarkFilesToBeRemoved) private markFilesToBeRemoved: MarkFilesToBeRemoved,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
  ) {}
  /**
   * Marks every file of the canceled invitee for removal and publishes one
   * FILE_REMOVED event per removed file. Invitations addressed by anything
   * other than a uuid are ignored.
   */
  async handle(event: SharedSubscriptionInvitationCanceledEvent): Promise<void> {
    if (event.payload.inviteeIdentifierType !== 'uuid') {
      return
    }
    const markingResult = await this.markFilesToBeRemoved.execute({
      userUuid: event.payload.inviteeIdentifier,
    })
    if (!markingResult.success) {
      return
    }
    for (const removedFile of markingResult.filesRemoved) {
      const fileRemovedEvent = this.domainEventFactory.createFileRemovedEvent({
        regularSubscriptionUuid: event.payload.inviterSubscriptionUuid,
        ...removedFile,
      })
      await this.domainEventPublisher.publish(fileRemovedEvent)
    }
  }
}

View file

@ -0,0 +1,6 @@
import { Readable } from 'stream'
/** Reads file content back from the underlying storage backend. */
export interface FileDownloaderInterface {
  /** Creates a readable stream over the byte range [startRange, endRange] of the stored file. */
  createDownloadStream(filePath: string, startRange: number, endRange: number): Readable
  /** Resolves with the total size of the stored file, in bytes. */
  getFileSize(filePath: string): Promise<number>
}

View file

@ -0,0 +1,6 @@
import { RemovedFileDescription } from '../File/RemovedFileDescription'
/** Removes files from the underlying storage backend. */
export interface FileRemoverInterface {
  /** Removes a single file; resolves with a number (presumably the removed file's byte size — confirm with implementations). */
  remove(filePath: string): Promise<number>
  /** Marks all of the given user's files for removal and resolves with their descriptions. */
  markFilesToBeRemoved(userUuid: string): Promise<Array<RemovedFileDescription>>
}

View file

@ -0,0 +1,9 @@
import { ChunkId } from '../Upload/ChunkId'
import { UploadChunkResult } from '../Upload/UploadChunkResult'
import { UploadId } from '../Upload/UploadId'
/** Writes file content to the underlying storage backend via multipart upload sessions. */
export interface FileUploaderInterface {
  /** Opens a multipart upload session for the file; resolves with the session's upload id. */
  createUploadSession(filePath: string): Promise<UploadId>
  /** Uploads one chunk within the session; resolves with a string (presumably the backend's tag/ETag for the part — confirm with implementations). */
  uploadFileChunk(dto: { uploadId: string; data: Uint8Array; filePath: string; chunkId: ChunkId }): Promise<string>
  /** Completes the session, assembling the recorded chunks into the final file. */
  finishUploadSession(uploadId: string, filePath: string, uploadChunkResults: Array<UploadChunkResult>): Promise<void>
}

View file

@ -0,0 +1 @@
// Ordinal of a single chunk within a multipart upload (callers appear to use positive, 1-based ids — confirm).
export type ChunkId = number

View file

@ -0,0 +1,7 @@
import { ChunkId } from './ChunkId'
// Outcome of uploading one chunk in a multipart upload session.
export type UploadChunkResult = {
  chunkId: ChunkId
  // Tag returned by the storage backend for the uploaded part (presumably an ETag — confirm).
  tag: string
  // Size of the uploaded chunk, in bytes.
  chunkSize: number
}

View file

@ -0,0 +1 @@
// Identifier of a multipart upload session, issued by FileUploaderInterface.createUploadSession.
export type UploadId = string

View file

@ -0,0 +1,9 @@
import { UploadChunkResult } from './UploadChunkResult'
import { UploadId } from './UploadId'
/** Persists multipart upload session state between chunk-upload requests. */
export interface UploadRepositoryInterface {
  /** Remembers which upload session id belongs to the file being uploaded. */
  storeUploadSession(filePath: string, uploadId: UploadId): Promise<void>
  /** Looks up the session id for a file; undefined when no session was stored. */
  retrieveUploadSessionId(filePath: string): Promise<UploadId | undefined>
  /** Records the result of one uploaded chunk under its session. */
  storeUploadChunkResult(uploadId: UploadId, uploadChunkResult: UploadChunkResult): Promise<void>
  /** Returns all chunk results recorded for the session. */
  retrieveUploadChunkResults(uploadId: UploadId): Promise<Array<UploadChunkResult>>
}

View file

@ -0,0 +1,53 @@
import 'reflect-metadata'
import { Logger } from 'winston'
import { FileUploaderInterface } from '../../Services/FileUploaderInterface'
import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface'
import { CreateUploadSession } from './CreateUploadSession'
describe('CreateUploadSession', () => {
  let fileUploader: FileUploaderInterface
  let uploadRepository: UploadRepositoryInterface
  let logger: Logger
  const createUseCase = () => new CreateUploadSession(fileUploader, uploadRepository, logger)
  beforeEach(() => {
    fileUploader = {} as jest.Mocked<FileUploaderInterface>
    fileUploader.createUploadSession = jest.fn().mockReturnValue('123')
    uploadRepository = {} as jest.Mocked<UploadRepositoryInterface>
    uploadRepository.storeUploadSession = jest.fn()
    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.error = jest.fn()
    logger.warn = jest.fn()
  })
  it('should indicate of an error in creating the upload session', async () => {
    // Any throw inside the use case is converted into a failure response.
    uploadRepository.storeUploadSession = jest.fn().mockImplementation(() => {
      throw new Error('oops')
    })
    expect(
      await createUseCase().execute({
        resourceRemoteIdentifier: '2-3-4',
        userUuid: '1-2-3',
      }),
    ).toEqual({
      success: false,
      message: 'Could not create upload session',
    })
  })
  it('should create an upload session', async () => {
    await createUseCase().execute({
      resourceRemoteIdentifier: '2-3-4',
      userUuid: '1-2-3',
    })
    // The storage path is derived as '<userUuid>/<resourceRemoteIdentifier>'.
    expect(fileUploader.createUploadSession).toHaveBeenCalledWith('1-2-3/2-3-4')
    expect(uploadRepository.storeUploadSession).toHaveBeenCalledWith('1-2-3/2-3-4', '123')
  })
})

View file

@ -0,0 +1,46 @@
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { UseCaseInterface } from '../UseCaseInterface'
import { CreateUploadSessionDTO } from './CreateUploadSessionDTO'
import { CreateUploadSessionResponse } from './CreateUploadSessionResponse'
import { FileUploaderInterface } from '../../Services/FileUploaderInterface'
import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface'
@injectable()
export class CreateUploadSession implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileUploader) private fileUploader: FileUploaderInterface,
    @inject(TYPES.UploadRepository) private uploadRepository: UploadRepositoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /**
   * Opens a new multipart upload session for the given user resource and
   * persists the session id so that later chunk uploads can find it.
   */
  async execute(dto: CreateUploadSessionDTO): Promise<CreateUploadSessionResponse> {
    const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}`

    try {
      this.logger.debug(`Creating upload session for resource: ${dto.resourceRemoteIdentifier}`)

      const uploadId = await this.fileUploader.createUploadSession(filePath)
      this.logger.debug(`Created upload session with id: ${uploadId}`)

      await this.uploadRepository.storeUploadSession(filePath, uploadId)

      return { success: true, uploadId }
    } catch (error) {
      this.logger.error(
        `Could not create upload session for resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`,
      )

      return { success: false, message: 'Could not create upload session' }
    }
  }
}

View file

@ -0,0 +1,6 @@
import { Uuid } from '@standardnotes/common'
/** Input for CreateUploadSession: the owning user and the remote resource identifier. */
export type CreateUploadSessionDTO = {
  userUuid: Uuid
  resourceRemoteIdentifier: string
}

View file

@ -0,0 +1,11 @@
import { UploadId } from '../../Upload/UploadId'
/** Result of CreateUploadSession — discriminated on `success`; the upload id is only present on success. */
export type CreateUploadSessionResponse =
  | {
      success: true
      uploadId: UploadId
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,112 @@
import 'reflect-metadata'
import { DomainEventPublisherInterface, FileUploadedEvent } from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface'
import { FileUploaderInterface } from '../../Services/FileUploaderInterface'
import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface'
import { FinishUploadSession } from './FinishUploadSession'
// Unit tests for the FinishUploadSession use case; all collaborators are mocked.
describe('FinishUploadSession', () => {
  let fileUploader: FileUploaderInterface
  let uploadRepository: UploadRepositoryInterface
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  let logger: Logger

  // Builds a use case wired to the mocks configured in beforeEach.
  const createUseCase = () =>
    new FinishUploadSession(fileUploader, uploadRepository, domainEventPublisher, domainEventFactory, logger)

  beforeEach(() => {
    fileUploader = {} as jest.Mocked<FileUploaderInterface>
    fileUploader.finishUploadSession = jest.fn().mockReturnValue('ETag123')

    uploadRepository = {} as jest.Mocked<UploadRepositoryInterface>
    // Happy path: an upload session '123' with a single 1-byte chunk exists.
    uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue('123')
    uploadRepository.retrieveUploadChunkResults = jest.fn().mockReturnValue([{ tag: '123', chunkId: 1, chunkSize: 1 }])

    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()

    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createFileUploadedEvent = jest.fn().mockReturnValue({} as jest.Mocked<FileUploadedEvent>)

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.error = jest.fn()
    logger.warn = jest.fn()
  })

  it('should not finish an upload session if non existing', async () => {
    uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue(undefined)

    await createUseCase().execute({
      resourceRemoteIdentifier: '2-3-4',
      userUuid: '1-2-3',
      uploadBytesLimit: 100,
      uploadBytesUsed: 0,
    })

    expect(fileUploader.finishUploadSession).not.toHaveBeenCalled()
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })

  it('should indicate of an error in finishing session fails', async () => {
    // Session lookup throws -> the use case must report failure, not throw.
    uploadRepository.retrieveUploadSessionId = jest.fn().mockImplementation(() => {
      throw new Error('oops')
    })

    expect(
      await createUseCase().execute({
        resourceRemoteIdentifier: '2-3-4',
        userUuid: '1-2-3',
        uploadBytesLimit: 100,
        uploadBytesUsed: 0,
      }),
    ).toEqual({
      success: false,
      message: 'Could not finish upload session',
    })

    expect(fileUploader.finishUploadSession).not.toHaveBeenCalled()
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })

  it('should finish an upload session', async () => {
    await createUseCase().execute({
      resourceRemoteIdentifier: '2-3-4',
      userUuid: '1-2-3',
      uploadBytesLimit: 100,
      uploadBytesUsed: 0,
    })

    expect(fileUploader.finishUploadSession).toHaveBeenCalledWith('123', '1-2-3/2-3-4', [
      { tag: '123', chunkId: 1, chunkSize: 1 },
    ])
    expect(domainEventPublisher.publish).toHaveBeenCalled()
  })

  it('should not finish an upload session if the file size exceeds storage quota', async () => {
    // Total chunk size (90) exceeds the remaining quota (100 limit - 20 used = 80).
    uploadRepository.retrieveUploadChunkResults = jest.fn().mockReturnValue([
      { tag: '123', chunkId: 1, chunkSize: 60 },
      { tag: '234', chunkId: 2, chunkSize: 10 },
      { tag: '345', chunkId: 3, chunkSize: 20 },
    ])

    expect(
      await createUseCase().execute({
        resourceRemoteIdentifier: '2-3-4',
        userUuid: '1-2-3',
        uploadBytesLimit: 100,
        uploadBytesUsed: 20,
      }),
    ).toEqual({
      success: false,
      message: 'Could not finish upload session. You are out of space.',
    })

    expect(fileUploader.finishUploadSession).not.toHaveBeenCalled()
    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })
})

View file

@ -0,0 +1,79 @@
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { UseCaseInterface } from '../UseCaseInterface'
import { FinishUploadSessionDTO } from './FinishUploadSessionDTO'
import { FinishUploadSessionResponse } from './FinishUploadSessionResponse'
import { FileUploaderInterface } from '../../Services/FileUploaderInterface'
import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface'
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface'
@injectable()
export class FinishUploadSession implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileUploader) private fileUploader: FileUploaderInterface,
    @inject(TYPES.UploadRepository) private uploadRepository: UploadRepositoryInterface,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /**
   * Completes a multipart upload session: verifies the session exists,
   * checks the assembled file fits within the user's remaining storage
   * quota, finalizes the upload and publishes a FileUploaded event.
   */
  async execute(dto: FinishUploadSessionDTO): Promise<FinishUploadSessionResponse> {
    try {
      this.logger.debug(`Finishing upload session for resource: ${dto.resourceRemoteIdentifier}`)

      const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}`

      const uploadId = await this.uploadRepository.retrieveUploadSessionId(filePath)
      if (uploadId === undefined) {
        this.logger.warn(`Could not find upload session for file path: ${filePath}`)

        return {
          success: false,
          message: 'Could not finish upload session',
        }
      }

      const uploadChunkResults = await this.uploadRepository.retrieveUploadChunkResults(uploadId)

      // The file's total size is the sum of all uploaded chunk sizes.
      let totalFileSize = 0
      for (const uploadChunkResult of uploadChunkResults) {
        totalFileSize += uploadChunkResult.chunkSize
      }

      const remainingSpaceLeft = dto.uploadBytesLimit - dto.uploadBytesUsed
      if (remainingSpaceLeft < totalFileSize) {
        // Log the refusal — every other failure path in this use case logs.
        this.logger.warn(
          `Not enough space to finish upload session for file path ${filePath}: needed ${totalFileSize} bytes, ${remainingSpaceLeft} bytes left`,
        )

        return {
          success: false,
          message: 'Could not finish upload session. You are out of space.',
        }
      }

      await this.fileUploader.finishUploadSession(uploadId, filePath, uploadChunkResults)

      await this.domainEventPublisher.publish(
        this.domainEventFactory.createFileUploadedEvent({
          userUuid: dto.userUuid,
          filePath,
          fileName: dto.resourceRemoteIdentifier,
          fileByteSize: totalFileSize,
        }),
      )

      return {
        success: true,
      }
    } catch (error) {
      this.logger.error(
        `Could not finish upload session for resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`,
      )

      return {
        success: false,
        message: 'Could not finish upload session',
      }
    }
  }
}

View file

@ -0,0 +1,8 @@
import { Uuid } from '@standardnotes/common'
/** Input for FinishUploadSession; byte counters are used for the storage-quota check. */
export type FinishUploadSessionDTO = {
  userUuid: Uuid
  resourceRemoteIdentifier: string
  uploadBytesUsed: number
  uploadBytesLimit: number
}

View file

@ -0,0 +1,8 @@
/** Result of FinishUploadSession — discriminated on `success`. */
export type FinishUploadSessionResponse =
  | {
      success: true
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,38 @@
import 'reflect-metadata'
import { Logger } from 'winston'
import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface'
import { GetFileMetadata } from './GetFileMetadata'
// Unit tests for the GetFileMetadata use case; the downloader is mocked.
describe('GetFileMetadata', () => {
  let fileDownloader: FileDownloaderInterface
  let logger: Logger

  // Builds a use case wired to the mocks configured in beforeEach.
  const createUseCase = () => new GetFileMetadata(fileDownloader, logger)

  beforeEach(() => {
    fileDownloader = {} as jest.Mocked<FileDownloaderInterface>
    // Happy path: the downloader reports a size of 123 bytes.
    fileDownloader.getFileSize = jest.fn().mockReturnValue(123)

    logger = {} as jest.Mocked<Logger>
    logger.error = jest.fn()
  })

  it('should return the file metadata', async () => {
    expect(await createUseCase().execute({ resourceRemoteIdentifier: '1-2-3', userUuid: '2-3-4' })).toEqual({
      success: true,
      size: 123,
    })
  })

  it('should not return the file metadata if it fails', async () => {
    // Size lookup throws -> the use case must report failure, not throw.
    fileDownloader.getFileSize = jest.fn().mockImplementation(() => {
      throw new Error('ooops')
    })

    expect(await createUseCase().execute({ resourceRemoteIdentifier: '1-2-3', userUuid: '2-3-4' })).toEqual({
      success: false,
      message: 'Could not get file metadata.',
    })
  })
})

View file

@ -0,0 +1,32 @@
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface'
import { UseCaseInterface } from '../UseCaseInterface'
import { GetFileMetadataDTO } from './GetFileMetadataDTO'
import { GetFileMetadataResponse } from './GetFileMetadataResponse'
@injectable()
export class GetFileMetadata implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileDownloader) private fileDownloader: FileDownloaderInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /** Returns metadata (currently only the byte size) for a user's remote resource. */
  async execute(dto: GetFileMetadataDTO): Promise<GetFileMetadataResponse> {
    try {
      const size = await this.fileDownloader.getFileSize(`${dto.userUuid}/${dto.resourceRemoteIdentifier}`)

      return {
        success: true,
        size,
      }
    } catch (error) {
      // Include the underlying error message for diagnosability — consistent
      // with the other use cases in this package.
      this.logger.error(
        `Could not get file metadata for resource: ${dto.userUuid}/${dto.resourceRemoteIdentifier} - ${
          (error as Error).message
        }`,
      )

      return {
        success: false,
        message: 'Could not get file metadata.',
      }
    }
  }
}

View file

@ -0,0 +1,4 @@
/** Input for GetFileMetadata: the owning user and the remote resource identifier. */
export type GetFileMetadataDTO = {
  userUuid: string
  resourceRemoteIdentifier: string
}

View file

@ -0,0 +1,9 @@
/** Result of GetFileMetadata — discriminated on `success`; `size` is in bytes. */
export type GetFileMetadataResponse =
  | {
      success: true
      size: number
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,39 @@
import 'reflect-metadata'
import { Logger } from 'winston'
import { FileRemoverInterface } from '../../Services/FileRemoverInterface'
import { MarkFilesToBeRemoved } from './MarkFilesToBeRemoved'
// Unit tests for the MarkFilesToBeRemoved use case; the remover is mocked.
describe('MarkFilesToBeRemoved', () => {
  let fileRemover: FileRemoverInterface
  let logger: Logger

  // Builds a use case wired to the mocks configured in beforeEach.
  const createUseCase = () => new MarkFilesToBeRemoved(fileRemover, logger)

  beforeEach(() => {
    fileRemover = {} as jest.Mocked<FileRemoverInterface>
    fileRemover.markFilesToBeRemoved = jest.fn()

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.error = jest.fn()
    logger.warn = jest.fn()
  })

  it('should mark files for being removed', async () => {
    expect(await createUseCase().execute({ userUuid: '1-2-3' })).toEqual({ success: true })

    expect(fileRemover.markFilesToBeRemoved).toHaveBeenCalledWith('1-2-3')
  })

  it('should indicate if marking files for being removed goes wrong', async () => {
    // Marking throws -> the use case must report failure, not throw.
    fileRemover.markFilesToBeRemoved = jest.fn().mockImplementation(() => {
      throw new Error('Oops')
    })

    expect(await createUseCase().execute({ userUuid: '1-2-3' })).toEqual({
      success: false,
      message: 'Could not mark resources for removal',
    })
  })
})

View file

@ -0,0 +1,36 @@
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { FileRemoverInterface } from '../../Services/FileRemoverInterface'
import { UseCaseInterface } from '../UseCaseInterface'
import { MarkFilesToBeRemovedDTO } from './MarkFilesToBeRemovedDTO'
import { MarkFilesToBeRemovedResponse } from './MarkFilesToBeRemovedResponse'
@injectable()
export class MarkFilesToBeRemoved implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileRemover) private fileRemover: FileRemoverInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /**
   * Flags every file belonging to the given user for later removal and
   * reports the descriptions of the affected files.
   */
  async execute(dto: MarkFilesToBeRemovedDTO): Promise<MarkFilesToBeRemovedResponse> {
    const { userUuid } = dto

    try {
      this.logger.debug(`Marking files for later removal for user: ${userUuid}`)

      const filesRemoved = await this.fileRemover.markFilesToBeRemoved(userUuid)

      return { success: true, filesRemoved }
    } catch (error) {
      this.logger.error(`Could not mark resources for removal: ${userUuid} - ${(error as Error).message}`)

      return { success: false, message: 'Could not mark resources for removal' }
    }
  }
}

View file

@ -0,0 +1,5 @@
import { Uuid } from '@standardnotes/common'
/** Input for MarkFilesToBeRemoved: the user whose files should be flagged for removal. */
export type MarkFilesToBeRemovedDTO = {
  userUuid: Uuid
}

View file

@ -0,0 +1,11 @@
import { RemovedFileDescription } from '../../File/RemovedFileDescription'
/** Result of MarkFilesToBeRemoved — discriminated on `success`. */
export type MarkFilesToBeRemovedResponse =
  | {
      success: true
      filesRemoved: Array<RemovedFileDescription>
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,63 @@
import 'reflect-metadata'
import { DomainEventPublisherInterface, FileRemovedEvent } from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface'
import { RemoveFile } from './RemoveFile'
import { FileRemoverInterface } from '../../Services/FileRemoverInterface'
// Unit tests for the RemoveFile use case; all collaborators are mocked.
describe('RemoveFile', () => {
  let fileRemover: FileRemoverInterface
  let domainEventPublisher: DomainEventPublisherInterface
  let domainEventFactory: DomainEventFactoryInterface
  let logger: Logger

  // Builds a use case wired to the mocks configured in beforeEach.
  const createUseCase = () => new RemoveFile(fileRemover, domainEventPublisher, domainEventFactory, logger)

  beforeEach(() => {
    fileRemover = {} as jest.Mocked<FileRemoverInterface>
    // Happy path: removing reports that 413 bytes were freed.
    fileRemover.remove = jest.fn().mockReturnValue(413)

    domainEventPublisher = {} as jest.Mocked<DomainEventPublisherInterface>
    domainEventPublisher.publish = jest.fn()

    domainEventFactory = {} as jest.Mocked<DomainEventFactoryInterface>
    domainEventFactory.createFileRemovedEvent = jest.fn().mockReturnValue({} as jest.Mocked<FileRemovedEvent>)

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.error = jest.fn()
    logger.warn = jest.fn()
  })

  it('should indicate of an error in removing fails', async () => {
    // Removal throws -> the use case must report failure and publish nothing.
    fileRemover.remove = jest.fn().mockImplementation(() => {
      throw new Error('oops')
    })

    expect(
      await createUseCase().execute({
        resourceRemoteIdentifier: '2-3-4',
        userUuid: '1-2-3',
        regularSubscriptionUuid: '3-4-5',
      }),
    ).toEqual({
      success: false,
      message: 'Could not remove resource',
    })

    expect(domainEventPublisher.publish).not.toHaveBeenCalled()
  })

  it('should remove a file', async () => {
    await createUseCase().execute({
      resourceRemoteIdentifier: '2-3-4',
      userUuid: '1-2-3',
      regularSubscriptionUuid: '3-4-5',
    })

    expect(fileRemover.remove).toHaveBeenCalledWith('1-2-3/2-3-4')
    expect(domainEventPublisher.publish).toHaveBeenCalled()
  })
})

View file

@ -0,0 +1,51 @@
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface'
import { FileRemoverInterface } from '../../Services/FileRemoverInterface'
import { UseCaseInterface } from '../UseCaseInterface'
import { RemoveFileDTO } from './RemoveFileDTO'
import { RemoveFileResponse } from './RemoveFileResponse'
@injectable()
export class RemoveFile implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileRemover) private fileRemover: FileRemoverInterface,
    @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
    @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /**
   * Removes a user's remote resource from storage and publishes a
   * FileRemoved event carrying the freed byte size.
   */
  async execute(dto: RemoveFileDTO): Promise<RemoveFileResponse> {
    try {
      this.logger.debug(`Removing file: ${dto.resourceRemoteIdentifier}`)

      const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}`

      const removedFileSize = await this.fileRemover.remove(filePath)

      await this.domainEventPublisher.publish(
        this.domainEventFactory.createFileRemovedEvent({
          userUuid: dto.userUuid,
          // Reuse the path computed above instead of re-interpolating it.
          filePath,
          fileName: dto.resourceRemoteIdentifier,
          fileByteSize: removedFileSize,
          regularSubscriptionUuid: dto.regularSubscriptionUuid,
        }),
      )

      return {
        success: true,
      }
    } catch (error) {
      this.logger.error(`Could not remove resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`)

      return {
        success: false,
        message: 'Could not remove resource',
      }
    }
  }
}

View file

@ -0,0 +1,7 @@
import { Uuid } from '@standardnotes/common'
/** Input for RemoveFile: owner, resource identifier and the subscription the storage belongs to. */
export type RemoveFileDTO = {
  userUuid: Uuid
  resourceRemoteIdentifier: string
  regularSubscriptionUuid: Uuid
}

View file

@ -0,0 +1,8 @@
/** Result of RemoveFile — discriminated on `success`. */
export type RemoveFileResponse =
  | {
      success: true
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,48 @@
import 'reflect-metadata'
import { Readable } from 'stream'
import { Logger } from 'winston'
import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface'
import { StreamDownloadFile } from './StreamDownloadFile'
// Unit tests for the StreamDownloadFile use case; the downloader is mocked.
describe('StreamDownloadFile', () => {
  let fileDownloader: FileDownloaderInterface
  let logger: Logger

  // Builds a use case wired to the mocks configured in beforeEach.
  const createUseCase = () => new StreamDownloadFile(fileDownloader, logger)

  beforeEach(() => {
    fileDownloader = {} as jest.Mocked<FileDownloaderInterface>
    // Happy path: the downloader yields an (empty) readable stream.
    fileDownloader.createDownloadStream = jest.fn().mockReturnValue(new Readable())

    logger = {} as jest.Mocked<Logger>
    logger.error = jest.fn()
  })

  it('should stream download file contents from S3', async () => {
    const result = await createUseCase().execute({
      userUuid: '2-3-4',
      resourceRemoteIdentifier: '1-2-3',
      startRange: 0,
      endRange: 200,
    })

    expect(result.success).toBeTruthy()
  })

  it('should not stream download file contents from S3 if it fails', async () => {
    // Stream creation throws -> the use case must report failure, not throw.
    fileDownloader.createDownloadStream = jest.fn().mockImplementation(() => {
      throw new Error('oops')
    })

    const result = await createUseCase().execute({
      userUuid: '2-3-4',
      resourceRemoteIdentifier: '1-2-3',
      startRange: 0,
      endRange: 200,
    })

    expect(result.success).toBeFalsy()
  })
})

View file

@ -0,0 +1,39 @@
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface'
import { UseCaseInterface } from '../UseCaseInterface'
import { StreamDownloadFileDTO } from './StreamDownloadFileDTO'
import { StreamDownloadFileResponse } from './StreamDownloadFileResponse'
@injectable()
export class StreamDownloadFile implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileDownloader) private fileDownloader: FileDownloaderInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /**
   * Creates a readable stream over the requested byte range of a user's
   * remote resource, so the controller can pipe it to the HTTP response.
   */
  async execute(dto: StreamDownloadFileDTO): Promise<StreamDownloadFileResponse> {
    try {
      const readStream = this.fileDownloader.createDownloadStream(
        `${dto.userUuid}/${dto.resourceRemoteIdentifier}`,
        dto.startRange,
        dto.endRange,
      )

      return {
        success: true,
        readStream,
      }
    } catch (error) {
      // Include the underlying error message for diagnosability — consistent
      // with the other use cases in this package.
      this.logger.error(
        `Could not create a download stream for resource: ${dto.userUuid}/${dto.resourceRemoteIdentifier} - ${
          (error as Error).message
        }`,
      )

      return {
        success: false,
        message: 'Could not create download stream',
      }
    }
  }
}

View file

@ -0,0 +1,6 @@
/** Input for StreamDownloadFile; the byte range bounds are passed through to the downloader. */
export type StreamDownloadFileDTO = {
  userUuid: string
  resourceRemoteIdentifier: string
  startRange: number
  endRange: number
}

View file

@ -0,0 +1,11 @@
import { Readable } from 'stream'
/** Result of StreamDownloadFile — discriminated on `success`; the stream is only present on success. */
export type StreamDownloadFileResponse =
  | {
      success: true
      readStream: Readable
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,84 @@
import 'reflect-metadata'
import { Logger } from 'winston'
import { FileUploaderInterface } from '../../Services/FileUploaderInterface'
import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface'
import { UploadFileChunk } from './UploadFileChunk'
// Unit tests for the UploadFileChunk use case; all collaborators are mocked.
describe('UploadFileChunk', () => {
  let fileUploader: FileUploaderInterface
  let uploadRepository: UploadRepositoryInterface
  let logger: Logger

  // Builds a use case wired to the mocks configured in beforeEach.
  const createUseCase = () => new UploadFileChunk(fileUploader, uploadRepository, logger)

  beforeEach(() => {
    fileUploader = {} as jest.Mocked<FileUploaderInterface>
    // Happy path: uploading a chunk yields the ETag 'ETag123'.
    fileUploader.uploadFileChunk = jest.fn().mockReturnValue('ETag123')

    uploadRepository = {} as jest.Mocked<UploadRepositoryInterface>
    uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue('123')
    uploadRepository.storeUploadChunkResult = jest.fn()

    logger = {} as jest.Mocked<Logger>
    logger.debug = jest.fn()
    logger.error = jest.fn()
    logger.warn = jest.fn()
  })

  it('should not upload a data chunk to a non existing file upload session', async () => {
    uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue(undefined)

    await createUseCase().execute({
      chunkId: 2,
      data: new Uint8Array([123]),
      resourceRemoteIdentifier: '2-3-4',
      userUuid: '1-2-3',
    })

    expect(fileUploader.uploadFileChunk).not.toHaveBeenCalled()
    expect(uploadRepository.storeUploadChunkResult).not.toHaveBeenCalled()
  })

  it('should indicate of an error in uploading the chunk', async () => {
    // Session lookup throws -> the use case must report failure, not throw.
    uploadRepository.retrieveUploadSessionId = jest.fn().mockImplementation(() => {
      throw new Error('oops')
    })

    expect(
      await createUseCase().execute({
        chunkId: 2,
        data: new Uint8Array([123]),
        resourceRemoteIdentifier: '2-3-4',
        userUuid: '1-2-3',
      }),
    ).toEqual({
      success: false,
      message: 'Could not upload file chunk',
    })

    expect(fileUploader.uploadFileChunk).not.toHaveBeenCalled()
    expect(uploadRepository.storeUploadChunkResult).not.toHaveBeenCalled()
  })

  it('should upload a data chunk to an existing file upload session', async () => {
    await createUseCase().execute({
      chunkId: 2,
      data: new Uint8Array([123]),
      resourceRemoteIdentifier: '2-3-4',
      userUuid: '1-2-3',
    })

    expect(fileUploader.uploadFileChunk).toHaveBeenCalledWith({
      chunkId: 2,
      data: new Uint8Array([123]),
      filePath: '1-2-3/2-3-4',
      uploadId: '123',
    })
    // The stored result records the ETag plus the chunk's id and byte size.
    expect(uploadRepository.storeUploadChunkResult).toHaveBeenCalledWith('123', {
      tag: 'ETag123',
      chunkId: 2,
      chunkSize: 1,
    })
  })
})

View file

@ -0,0 +1,64 @@
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'
import TYPES from '../../../Bootstrap/Types'
import { UseCaseInterface } from '../UseCaseInterface'
import { UploadFileChunkDTO } from './UploadFileChunkDTO'
import { UploadFileChunkResponse } from './UploadFileChunkResponse'
import { FileUploaderInterface } from '../../Services/FileUploaderInterface'
import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface'
@injectable()
export class UploadFileChunk implements UseCaseInterface {
  constructor(
    @inject(TYPES.FileUploader) private fileUploader: FileUploaderInterface,
    @inject(TYPES.UploadRepository) private uploadRepository: UploadRepositoryInterface,
    @inject(TYPES.Logger) private logger: Logger,
  ) {}

  /**
   * Uploads a single chunk of data into an already-open upload session and
   * records the resulting ETag so the session can later be finalized.
   */
  async execute(dto: UploadFileChunkDTO): Promise<UploadFileChunkResponse> {
    try {
      this.logger.debug(
        `Starting upload file chunk ${dto.chunkId} with ${dto.data.byteLength} bytes for resource: ${dto.resourceRemoteIdentifier}`,
      )

      const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}`

      const sessionId = await this.uploadRepository.retrieveUploadSessionId(filePath)
      if (sessionId === undefined) {
        this.logger.warn(`Could not find upload session for file path: ${filePath}`)

        return { success: false, message: 'Could not find upload session' }
      }

      const chunkETag = await this.fileUploader.uploadFileChunk({
        uploadId: sessionId,
        data: dto.data,
        chunkId: dto.chunkId,
        filePath,
      })

      await this.uploadRepository.storeUploadChunkResult(sessionId, {
        tag: chunkETag,
        chunkId: dto.chunkId,
        chunkSize: dto.data.byteLength,
      })

      return { success: true }
    } catch (error) {
      this.logger.error(
        `Could not upload file chunk for resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`,
      )

      return { success: false, message: 'Could not upload file chunk' }
    }
  }
}

View file

@ -0,0 +1,8 @@
import { ChunkId } from '../../Upload/ChunkId'
/** Input for UploadFileChunk: the raw chunk bytes plus the chunk's position and owning resource. */
export type UploadFileChunkDTO = {
  data: Uint8Array
  chunkId: ChunkId
  userUuid: string
  resourceRemoteIdentifier: string
}

View file

@ -0,0 +1,8 @@
/** Result of UploadFileChunk — discriminated on `success`. */
export type UploadFileChunkResponse =
  | {
      success: true
    }
  | {
      success: false
      message: string
    }

View file

@ -0,0 +1,3 @@
/**
 * Common contract for all use cases: a single async `execute` returning a
 * result object. Implementations narrow both the arguments and the return
 * type (hence the deliberately loose `any[]` here).
 */
export interface UseCaseInterface {
  execute(...args: any[]): Promise<Record<string, unknown>>
}

View file

@ -0,0 +1,19 @@
import { Readable } from 'stream'
import { createReadStream, promises } from 'fs'
import { inject, injectable } from 'inversify'
import { FileDownloaderInterface } from '../../Domain/Services/FileDownloaderInterface'
import TYPES from '../../Bootstrap/Types'
@injectable()
export class FSFileDownloader implements FileDownloaderInterface {
  constructor(@inject(TYPES.FILE_UPLOAD_PATH) private fileUploadPath: string) {}

  /** Byte size of the stored file, obtained by stat-ing the local path. */
  async getFileSize(filePath: string): Promise<number> {
    const stats = await promises.stat(`${this.fileUploadPath}/${filePath}`)

    return stats.size
  }

  /** Readable stream over the requested byte window (start and end are inclusive). */
  createDownloadStream(filePath: string, startRange: number, endRange: number): Readable {
    const fullPath = `${this.fileUploadPath}/${filePath}`

    return createReadStream(fullPath, { start: startRange, end: endRange })
  }
}

View file

@ -0,0 +1,26 @@
import { inject, injectable } from 'inversify'
import { promises } from 'fs'
import { FileRemoverInterface } from '../../Domain/Services/FileRemoverInterface'
import { RemovedFileDescription } from '../../Domain/File/RemovedFileDescription'
import TYPES from '../../Bootstrap/Types'
@injectable()
export class FSFileRemover implements FileRemoverInterface {
  constructor(@inject(TYPES.FILE_UPLOAD_PATH) private fileUploadPath: string) {}

  /**
   * Removes the whole directory of a user's files.
   *
   * `promises.rmdir` only deletes *empty* directories (and its recursive mode
   * is deprecated), so it would throw ENOTEMPTY for any user who still has
   * files. `rm` with recursive+force removes the tree and does not fail when
   * the directory is already gone.
   */
  async markFilesToBeRemoved(userUuid: string): Promise<Array<RemovedFileDescription>> {
    await promises.rm(`${this.fileUploadPath}/${userUuid}`, { recursive: true, force: true })

    // The FS driver removes immediately; no per-file descriptions are tracked.
    return []
  }

  /** Deletes a single file and returns the number of bytes it occupied. */
  async remove(filePath: string): Promise<number> {
    const fullPath = `${this.fileUploadPath}/${filePath}`

    const fileSize = (await promises.stat(fullPath)).size
    await promises.rm(fullPath)

    return fileSize
  }
}

View file

@ -0,0 +1,67 @@
import { promises } from 'fs'
import { dirname } from 'path'
import { inject, injectable } from 'inversify'
import { FileUploaderInterface } from '../../Domain/Services/FileUploaderInterface'
import { UploadChunkResult } from '../../Domain/Upload/UploadChunkResult'
import { Logger } from 'winston'
import TYPES from '../../Bootstrap/Types'
@injectable()
export class FSFileUploader implements FileUploaderInterface {
  // Chunks are buffered in memory per upload id until the session is finished.
  private inMemoryChunks: Map<string, Map<number, Uint8Array>>

  constructor(
    @inject(TYPES.FILE_UPLOAD_PATH) private fileUploadPath: string,
    @inject(TYPES.Logger) private logger: Logger,
  ) {
    this.inMemoryChunks = new Map<string, Map<number, Uint8Array>>()
  }

  /** Buffers one chunk in memory; the returned "ETag" is simply the upload id. */
  async uploadFileChunk(dto: {
    uploadId: string
    data: Uint8Array
    filePath: string
    chunkId: number
  }): Promise<string> {
    let chunksForUpload = this.inMemoryChunks.get(dto.uploadId)
    if (chunksForUpload === undefined) {
      chunksForUpload = new Map<number, Uint8Array>()
      this.inMemoryChunks.set(dto.uploadId, chunksForUpload)
    }

    this.logger.debug(`FS storing file chunk ${dto.chunkId} in memory for ${dto.uploadId}`)

    chunksForUpload.set(dto.chunkId, dto.data)

    return dto.uploadId
  }

  /** Flushes the buffered chunks to disk in chunk-id order, then frees the buffer. */
  async finishUploadSession(
    uploadId: string,
    filePath: string,
    _uploadChunkResults: UploadChunkResult[],
  ): Promise<void> {
    this.logger.debug(`FS finishing upload for ${uploadId}`)

    const bufferedChunks = this.inMemoryChunks.get(uploadId)
    if (bufferedChunks === undefined) {
      throw new Error(`Could not find chunks for upload ${uploadId}`)
    }

    const targetPath = `${this.fileUploadPath}/${filePath}`
    const sortedChunkIds = Array.from(bufferedChunks.keys()).sort((left, right) => left - right)
    for (const chunkId of sortedChunkIds) {
      // NOTE(review): appendFile adds to any pre-existing file at this path —
      // confirm a re-upload of the same resource removes the old file first.
      await promises.appendFile(targetPath, bufferedChunks.get(chunkId) as Uint8Array)
    }

    this.inMemoryChunks.delete(uploadId)
  }

  /** Ensures the parent directory exists; the returned "upload id" is the file's full path. */
  async createUploadSession(filePath: string): Promise<string> {
    const fullPath = `${this.fileUploadPath}/${filePath}`
    await promises.mkdir(dirname(fullPath), { recursive: true })

    return fullPath
  }
}

View file

@ -0,0 +1,60 @@
import 'reflect-metadata'
import * as IORedis from 'ioredis'
import { RedisUploadRepository } from './RedisUploadRepository'
// Unit tests for the Redis-backed upload repository; the redis client is mocked.
describe('RedisUploadRepository', () => {
  let redisClient: IORedis.Redis

  // Builds a repository wired to the mocked redis client.
  const createRepository = () => new RedisUploadRepository(redisClient)

  beforeEach(() => {
    redisClient = {} as jest.Mocked<IORedis.Redis>
    redisClient.setex = jest.fn()
    redisClient.get = jest.fn().mockReturnValue('123')
    redisClient.lpush = jest.fn()
    redisClient.expire = jest.fn()
    // Chunks come back in lpush (reverse-insertion) order, deliberately unsorted.
    redisClient.lrange = jest.fn().mockReturnValue(['{"tag":"123","chunkId":3}', '{"tag":"123","chunkId":1}'])
  })

  it('should store an upload session', async () => {
    await createRepository().storeUploadSession('1-2-3/2-3-4', '123')

    // Sessions expire after 7200 seconds (2 hours).
    expect(redisClient.setex).toHaveBeenCalledWith('upload-session:1-2-3/2-3-4', 7200, '123')
  })

  it('should retrieve an upload session id', async () => {
    await createRepository().retrieveUploadSessionId('1-2-3/2-3-4')

    expect(redisClient.get).toHaveBeenCalledWith('upload-session:1-2-3/2-3-4')
  })

  it('should return undefied on an non existing upload session', async () => {
    redisClient.get = jest.fn().mockReturnValue(null)

    expect(await createRepository().retrieveUploadSessionId('1-2-3/2-3-4')).toBeUndefined()
  })

  it('should store and upload chunk result', async () => {
    await createRepository().storeUploadChunkResult('123', { tag: '123', chunkId: 3, chunkSize: 100 })

    expect(redisClient.lpush).toHaveBeenCalledWith('upload-chunks:123', '{"tag":"123","chunkId":3,"chunkSize":100}')
    expect(redisClient.expire).toHaveBeenCalledWith('upload-chunks:123', 7200)
  })

  it('should retrieve upload chunk results', async () => {
    // The repository is expected to sort the results by chunk id ascending.
    expect(await createRepository().retrieveUploadChunkResults('123')).toEqual([
      {
        tag: '123',
        chunkId: 1,
      },
      {
        tag: '123',
        chunkId: 3,
      },
    ])

    expect(redisClient.lrange).toHaveBeenCalledWith('upload-chunks:123', 0, -1)
  })
})

View file

@ -0,0 +1,50 @@
import * as IORedis from 'ioredis'
import { inject, injectable } from 'inversify'
import TYPES from '../../Bootstrap/Types'
import { UploadRepositoryInterface } from '../../Domain/Upload/UploadRepositoryInterface'
import { UploadChunkResult } from '../../Domain/Upload/UploadChunkResult'
@injectable()
export class RedisUploadRepository implements UploadRepositoryInterface {
  private readonly UPLOAD_SESSION_PREFIX = 'upload-session'
  private readonly UPLOAD_CHUNKS_PREFIX = 'upload-chunks'
  private readonly UPLOAD_SESSION_DEFAULT_TTL = 7200

  constructor(@inject(TYPES.Redis) private redisClient: IORedis.Redis) {}

  /** Persists the file path -> upload id mapping with a 2 hour expiry. */
  async storeUploadSession(filePath: string, uploadId: string): Promise<void> {
    const sessionKey = `${this.UPLOAD_SESSION_PREFIX}:${filePath}`

    await this.redisClient.setex(sessionKey, this.UPLOAD_SESSION_DEFAULT_TTL, uploadId)
  }

  /** Upload id for a file path, or undefined when the session expired or never existed. */
  async retrieveUploadSessionId(filePath: string): Promise<string | undefined> {
    const storedId = await this.redisClient.get(`${this.UPLOAD_SESSION_PREFIX}:${filePath}`)

    return storedId ? storedId : undefined
  }

  /** Appends one chunk result to the session's list and refreshes the list's TTL. */
  async storeUploadChunkResult(uploadId: string, uploadChunkResult: UploadChunkResult): Promise<void> {
    const chunksKey = `${this.UPLOAD_CHUNKS_PREFIX}:${uploadId}`

    await this.redisClient.lpush(chunksKey, JSON.stringify(uploadChunkResult))
    await this.redisClient.expire(chunksKey, this.UPLOAD_SESSION_DEFAULT_TTL)
  }

  /** All chunk results for the session, sorted ascending by chunk id. */
  async retrieveUploadChunkResults(uploadId: string): Promise<UploadChunkResult[]> {
    const serializedResults = await this.redisClient.lrange(`${this.UPLOAD_CHUNKS_PREFIX}:${uploadId}`, 0, -1)

    // Chunks were lpush-ed as they arrived; restore chunk-id order before returning.
    return serializedResults
      .map((serialized): UploadChunkResult => JSON.parse(serialized))
      .sort((left, right) => left.chunkId - right.chunkId)
  }
}

View file

@ -0,0 +1,33 @@
import 'reflect-metadata'
import * as AWS from 'aws-sdk'
import { Readable } from 'stream'
import { S3FileDownloader } from './S3FileDownloader'
describe('S3FileDownloader', () => {
  let s3Client: AWS.S3
  const bucketName = 'test'

  const createService = () => new S3FileDownloader(s3Client, bucketName)

  beforeEach(() => {
    // getObject returns a request whose stream is consumed by the service.
    const getObjectRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.GetObjectOutput, AWS.AWSError>>
    getObjectRequest.createReadStream = jest.fn().mockReturnValue(new Readable())

    // headObject resolves with metadata only; ContentLength is the size under test.
    const headObjectRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.HeadObjectOutput, AWS.AWSError>>
    headObjectRequest.promise = jest.fn().mockResolvedValue({ ContentLength: 200 })

    s3Client = {} as jest.Mocked<AWS.S3>
    s3Client.getObject = jest.fn().mockReturnValue(getObjectRequest)
    s3Client.headObject = jest.fn().mockReturnValue(headObjectRequest)
  })

  it('should create a download stream', () => {
    expect(createService().createDownloadStream('test.txt', 0, 200)).toBeInstanceOf(Readable)
  })

  it('should get file size', async () => {
    expect(await createService().getFileSize('test.txt')).toEqual(200)
  })
})

View file

@ -0,0 +1,32 @@
import { inject, injectable } from 'inversify'
import * as AWS from 'aws-sdk'
import { Readable } from 'stream'
import TYPES from '../../Bootstrap/Types'
import { FileDownloaderInterface } from '../../Domain/Services/FileDownloaderInterface'
@injectable()
/**
 * FileDownloaderInterface implementation backed by AWS S3.
 * Streams object byte ranges instead of buffering whole files in memory.
 */
export class S3FileDownloader implements FileDownloaderInterface {
  constructor(
    @inject(TYPES.S3) private s3Client: AWS.S3,
    // Fixed the misspelled "s3BuckeName"; the property is private and injected
    // positionally, so external callers and the DI container are unaffected.
    @inject(TYPES.S3_BUCKET_NAME) private s3BucketName: string,
  ) {}

  /**
   * Opens a readable stream over the inclusive byte range
   * [startRange, endRange] of the object at filePath.
   */
  createDownloadStream(filePath: string, startRange: number, endRange: number): Readable {
    return this.s3Client
      .getObject({
        Bucket: this.s3BucketName,
        Key: filePath,
        // S3 Range header is inclusive on both ends: "bytes=start-end".
        Range: `bytes=${startRange}-${endRange}`,
      })
      .createReadStream()
  }

  /**
   * Returns the object's size in bytes via a HEAD request (no body transfer).
   */
  async getFileSize(filePath: string): Promise<number> {
    const head = await this.s3Client
      .headObject({
        Bucket: this.s3BucketName,
        Key: filePath,
      })
      .promise()

    // NOTE(review): ContentLength is optional in the SDK types; the assertion
    // assumes S3 always populates it for an existing object — confirm callers
    // tolerate NaN/undefined if the object is missing.
    return head.ContentLength as number
  }
}

View file

@ -0,0 +1,113 @@
import 'reflect-metadata'
import * as AWS from 'aws-sdk'
import { S3FileRemover } from './S3FileRemover'
describe('S3FileRemover', () => {
  let s3Client: AWS.S3
  // Doubles as both the source Bucket and the CopySource prefix in assertions.
  const s3BuckeName = 'test'
  const createService = () => new S3FileRemover(s3Client, s3BuckeName)
  beforeEach(() => {
    // deleteObject and headObject are stubbed for every test; copyObject and
    // listObjectsV2 are wired per-test because only the "mark for removal"
    // scenarios use them.
    const deleteObjectRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.DeleteObjectRequest, AWS.AWSError>>
    deleteObjectRequest.promise = jest.fn()
    s3Client = {} as jest.Mocked<AWS.S3>
    s3Client.deleteObject = jest.fn().mockReturnValue(deleteObjectRequest)
    // headObject reports a fixed 200-byte object; remove() returns this size.
    const headRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.HeadObjectOutput, AWS.AWSError>>
    headRequest.promise = jest.fn().mockReturnValue(Promise.resolve({ ContentLength: 200 }))
    s3Client.headObject = jest.fn().mockReturnValue(headRequest)
  })
  it('should delete a file', async () => {
    // remove() resolves with the deleted object's byte size (from headObject).
    expect(await createService().remove('123/234')).toEqual(200)
    expect(s3Client.deleteObject).toHaveBeenCalledWith({
      Bucket: 'test',
      Key: '123/234',
    })
  })
  it('should mark user files for removal', async () => {
    const copyObjectRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.CopyObjectRequest, AWS.AWSError>>
    copyObjectRequest.promise = jest.fn()
    s3Client.copyObject = jest.fn().mockReturnValue(copyObjectRequest)
    const listObjectsRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.ListObjectsV2Request, AWS.AWSError>>
    // Two well-formed entries plus an empty one: the keyless entry must be
    // skipped without producing a description or any S3 calls.
    listObjectsRequest.promise = jest.fn().mockReturnValue({
      Contents: [
        {
          Key: '123/2-3-4',
          Size: 123,
        },
        {
          Key: '123/3-4-5',
          Size: 234,
        },
        {},
      ],
    } as jest.Mocked<AWS.S3.ListObjectsV2Output>)
    s3Client.listObjectsV2 = jest.fn().mockReturnValue(listObjectsRequest)
    // Descriptions derive fileName by stripping the "<userUuid>/" prefix.
    expect(await createService().markFilesToBeRemoved('123')).toEqual([
      {
        fileByteSize: 123,
        fileName: '2-3-4',
        filePath: '123/2-3-4',
        userUuid: '123',
      },
      {
        fileByteSize: 234,
        fileName: '3-4-5',
        filePath: '123/3-4-5',
        userUuid: '123',
      },
    ])
    // Each file is first archived under expiration-chamber/ with DEEP_ARCHIVE…
    expect(s3Client.copyObject).toHaveBeenCalledTimes(2)
    expect(s3Client.copyObject).toHaveBeenNthCalledWith(1, {
      Bucket: 'test',
      CopySource: 'test/123/2-3-4',
      Key: 'expiration-chamber/123/2-3-4',
      StorageClass: 'DEEP_ARCHIVE',
    })
    expect(s3Client.copyObject).toHaveBeenNthCalledWith(2, {
      Bucket: 'test',
      CopySource: 'test/123/3-4-5',
      Key: 'expiration-chamber/123/3-4-5',
      StorageClass: 'DEEP_ARCHIVE',
    })
    // …and then deleted from its original location, in the same order.
    expect(s3Client.deleteObject).toHaveBeenCalledTimes(2)
    expect(s3Client.deleteObject).toHaveBeenNthCalledWith(1, {
      Bucket: 'test',
      Key: '123/2-3-4',
    })
    expect(s3Client.deleteObject).toHaveBeenNthCalledWith(2, {
      Bucket: 'test',
      Key: '123/3-4-5',
    })
  })
  it('should not mark user files for removal if there none', async () => {
    const copyObjectRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.CopyObjectRequest, AWS.AWSError>>
    copyObjectRequest.promise = jest.fn()
    s3Client.copyObject = jest.fn().mockReturnValue(copyObjectRequest)
    const listObjectsRequest = {} as jest.Mocked<AWS.Request<AWS.S3.Types.ListObjectsV2Request, AWS.AWSError>>
    // A listing with no Contents field at all: the service must short-circuit.
    listObjectsRequest.promise = jest.fn().mockReturnValue({} as jest.Mocked<AWS.S3.ListObjectsV2Output>)
    s3Client.listObjectsV2 = jest.fn().mockReturnValue(listObjectsRequest)
    expect(await createService().markFilesToBeRemoved('123')).toEqual([])
    expect(s3Client.copyObject).not.toHaveBeenCalled()
    expect(s3Client.deleteObject).not.toHaveBeenCalled()
  })
})

View file

@ -0,0 +1,79 @@
import { inject, injectable } from 'inversify'
import * as AWS from 'aws-sdk'
import TYPES from '../../Bootstrap/Types'
import { FileRemoverInterface } from '../../Domain/Services/FileRemoverInterface'
import { RemovedFileDescription } from '../../Domain/File/RemovedFileDescription'
@injectable()
/**
 * FileRemoverInterface implementation backed by AWS S3.
 * Hard-deletes single files, and soft-deletes a user's whole tree by
 * archiving each object to "expiration-chamber/" before removing it.
 */
export class S3FileRemover implements FileRemoverInterface {
  constructor(
    @inject(TYPES.S3) private s3Client: AWS.S3,
    // Fixed the misspelled "s3BuckeName"; the property is private and injected
    // positionally, so external callers and the DI container are unaffected.
    @inject(TYPES.S3_BUCKET_NAME) private s3BucketName: string,
  ) {}

  /**
   * Archives every object under "<userUuid>/" to the expiration chamber
   * (DEEP_ARCHIVE storage class), deletes the original, and returns a
   * description of each file that was moved.
   *
   * Objects without a Key are skipped. Returns [] when the listing is empty.
   * NOTE(review): listObjectsV2 returns at most 1000 keys per call and this
   * does not paginate — users with more files would be partially processed;
   * confirm expected maximum file counts.
   */
  async markFilesToBeRemoved(userUuid: string): Promise<Array<RemovedFileDescription>> {
    const filesResponse = await this.s3Client
      .listObjectsV2({
        Bucket: this.s3BucketName,
        Prefix: `${userUuid}/`,
      })
      .promise()

    if (filesResponse.Contents === undefined) {
      return []
    }

    const files = filesResponse.Contents

    const removedFileDescriptions: Array<RemovedFileDescription> = []
    for (const file of files) {
      if (file.Key === undefined) {
        continue
      }

      // Copy first, delete second: a failure mid-loop leaves the original
      // object intact rather than losing data.
      await this.s3Client
        .copyObject({
          Bucket: this.s3BucketName,
          Key: `expiration-chamber/${file.Key}`,
          CopySource: `${this.s3BucketName}/${file.Key}`,
          StorageClass: 'DEEP_ARCHIVE',
        })
        .promise()

      await this.s3Client
        .deleteObject({
          Bucket: this.s3BucketName,
          Key: file.Key,
        })
        .promise()

      removedFileDescriptions.push({
        // NOTE(review): Size is optional in the SDK types; assumed present for
        // listed objects — confirm downstream consumers tolerate undefined.
        fileByteSize: file.Size as number,
        fileName: file.Key.replace(`${userUuid}/`, ''),
        filePath: file.Key,
        userUuid,
      })
    }

    return removedFileDescriptions
  }

  /**
   * Permanently deletes the object at filePath and returns its size in bytes
   * (measured via a HEAD request just before deletion).
   */
  async remove(filePath: string): Promise<number> {
    const head = await this.s3Client
      .headObject({
        Bucket: this.s3BucketName,
        Key: filePath,
      })
      .promise()

    const fileSize = head.ContentLength as number

    await this.s3Client
      .deleteObject({
        Bucket: this.s3BucketName,
        Key: filePath,
      })
      .promise()

    return fileSize
  }
}

Some files were not shown because too many files have changed in this diff Show more