Compare commits

1 commit

Author: Karol Sójko
SHA1: 7a1d3f5266
Message: fix: unplug node-gyp based deps
Date: 2023-07-04 10:23:55 +02:00
4666 changed files with 189863 additions and 70873 deletions

.github/ci.env (vendored, 7 changes)

@@ -4,14 +4,13 @@ DB_USERNAME=std_notes_user
DB_PASSWORD=changeme123
DB_DATABASE=standard_notes_db
DB_PORT=3306
DB_DEBUG_LEVEL=all
DB_SQLITE_DATABASE_PATH=standard_notes_db
REDIS_PORT=6379
REDIS_HOST=cache
AUTH_SERVER_ACCESS_TOKEN_AGE=4
AUTH_SERVER_REFRESH_TOKEN_AGE=10
AUTH_SERVER_EPHEMERAL_SESSION_AGE=300
SYNCING_SERVER_REVISIONS_FREQUENCY=2
SYNCING_SERVER_REVISIONS_FREQUENCY=5
AUTH_SERVER_LOG_LEVEL=debug
SYNCING_SERVER_LOG_LEVEL=debug
FILES_SERVER_LOG_LEVEL=debug
@@ -26,7 +25,3 @@ MYSQL_ROOT_PASSWORD=changeme123
AUTH_JWT_SECRET=f95259c5e441f5a4646d76422cfb3df4c4488842901aa50b6c51b8be2e0040e9
AUTH_SERVER_ENCRYPTION_SERVER_KEY=1087415dfde3093797f9a7ca93a49e7d7aa1861735eb0d32aae9c303b8c3d060
VALET_TOKEN_SECRET=4b886819ebe1e908077c6cae96311b48a8416bd60cc91c03060e15bdf6b30d1f
SYNCING_SERVER_CONTENT_SIZE_TRANSFER_LIMIT=100000
HTTP_REQUEST_PAYLOAD_LIMIT_MEGABYTES=1

@@ -9,134 +9,91 @@ updates:
directory: "/"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/analytics"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/api-gateway"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/auth"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/common"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/domain-core"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/domain-events"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/domain-events-infra"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/event-store"
schedule:
interval: "daily"
- package-ecosystem: "npm"
directory: "/packages/files"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/home-server"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/predicates"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/revisions"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/scheduler"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/security"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/settings"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/sncrypto-node"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/syncing-server"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/time"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/websockets"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "github-actions"
directory: "/"

@@ -20,3 +20,19 @@ jobs:
deploy_web: false
package_path: packages/analytics
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_ANALYTICS_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -20,3 +20,17 @@ jobs:
deploy_worker: false
package_path: packages/api-gateway
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_API_GATEWAY_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -19,3 +19,27 @@ jobs:
workspace_name: "@standardnotes/auth-server"
package_path: packages/auth
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_AUTH_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_AUTH_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -24,7 +24,7 @@ jobs:
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v3
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -27,18 +27,23 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Create Bundle Dir
id: bundle-dir
run: echo "temp_dir=$(mktemp -d -t ${{ inputs.service_name }}-${{ github.sha }}-XXXXXXX)" >> $GITHUB_OUTPUT
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
${{ steps.bundle-dir.outputs.temp_dir }}
key: ${{ runner.os }}-${{ inputs.service_name }}-build-${{ github.sha }}
- name: Set up Node
@@ -52,16 +57,20 @@ jobs:
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
run: yarn build ${{ inputs.package_path }}
- name: Bundle
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn workspace ${{ inputs.workspace_name }} bundle --no-compress --output-directory ${{ steps.bundle-dir.outputs.temp_dir }}
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v3
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -69,7 +78,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
uses: aws-actions/amazon-ecr-login@v1
- name: Set up QEMU
uses: docker/setup-qemu-action@master
@@ -81,11 +90,11 @@ jobs:
uses: docker/setup-buildx-action@master
- name: Publish Docker image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ${{ inputs.package_path }}/Dockerfile
context: ${{ steps.bundle-dir.outputs.temp_dir }}
file: ${{ steps.bundle-dir.outputs.temp_dir }}/${{ inputs.package_path }}/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |

@@ -7,20 +7,122 @@ on:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
secrets:
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true
jobs:
e2e-self-hosted:
uses: standardnotes/server/.github/workflows/e2e-self-hosted.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag }}
suite: ${{ inputs.suite }}
e2e:
name: (Docker) E2E Test Suite
runs-on: ubuntu-latest
services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
steps:
- uses: actions/checkout@v3
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install --immutable
- name: Run Server
run: docker compose -f docker-compose.ci.yml up -d
env:
DB_TYPE: mysql
CACHE_TYPE: redis
- name: Wait for server to start
run: docker/is-available.sh http://localhost:3123 $(pwd)/logs
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 1200000 -f http://localhost:9001/mocha/test.html
e2e-home-server:
uses: standardnotes/server/.github/workflows/e2e-home-server.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag }}
suite: ${{ inputs.suite }}
name: (Home Server) E2E Test Suite
strategy:
matrix:
db_type: [mysql, sqlite]
cache_type: [redis, memory]
runs-on: ubuntu-latest
services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
cache:
image: redis
ports:
- 6379:6379
db:
image: mysql
ports:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: standardnotes
MYSQL_USER: standardnotes
MYSQL_PASSWORD: standardnotes
steps:
- uses: actions/checkout@v3
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install Dependencies
run: yarn install --immutable
- name: Build
run: yarn build
- name: Copy dotenv file
run: cp packages/home-server/.env.sample packages/home-server/.env
- name: Fill in env variables
run: |
sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
echo "REFRESH_TOKEN_AGE=7" >> packages/home-server/.env
echo "REVISIONS_FREQUENCY=5" >> packages/home-server/.env
echo "DB_HOST=db" >> packages/home-server/.env
echo "DB_PORT=3306" >> packages/home-server/.env
echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
echo "REDIS_URL=redis://cache" >> packages/home-server/.env
echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
- name: Run Server
run: nohup yarn workspace @standardnotes/home-server start &
env:
PORT: 3123
- name: Wait for server to start
run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 1200000 -f http://localhost:9001/mocha/test.html?skip_paid_features=true

@@ -17,10 +17,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -35,7 +35,7 @@ jobs:
uses: docker/setup-buildx-action@master
- name: Publish Docker image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
builder: ${{ steps.buildx.outputs.name }}
context: .

@@ -42,26 +42,26 @@ jobs:
workspace_name: ${{ inputs.workspace_name }}
secrets: inherit
# deploy-web:
# if: ${{ inputs.deploy_web }}
deploy-web:
if: ${{ inputs.deploy_web }}
# needs: publish
needs: publish
# name: Deploy Web
# uses: standardnotes/server/.github/workflows/common-deploy.yml@main
# with:
# service_name: ${{ inputs.service_name }}
# docker_image: ${{ inputs.service_name }}:${{ github.sha }}
# secrets: inherit
name: Deploy Web
uses: standardnotes/server/.github/workflows/common-deploy.yml@main
with:
service_name: ${{ inputs.service_name }}
docker_image: ${{ inputs.service_name }}:${{ github.sha }}
secrets: inherit
# deploy-worker:
# if: ${{ inputs.deploy_worker }}
deploy-worker:
if: ${{ inputs.deploy_worker }}
# needs: publish
needs: publish
# name: Deploy Worker
# uses: standardnotes/server/.github/workflows/common-deploy.yml@main
# with:
# service_name: ${{ inputs.service_name }}-worker
# docker_image: ${{ inputs.service_name }}:${{ github.sha }}
# secrets: inherit
name: Deploy Worker
uses: standardnotes/server/.github/workflows/common-deploy.yml@main
with:
service_name: ${{ inputs.service_name }}-worker
docker_image: ${{ inputs.service_name }}:${{ github.sha }}
secrets: inherit

@@ -1,106 +0,0 @@
name: E2E Home Server Test Suite
on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
jobs:
e2e-home-server:
name: (Home Server) E2E Test Suite
strategy:
fail-fast: false
matrix:
db_type: [mysql, sqlite]
cache_type: [redis, memory]
runs-on: ubuntu-latest
services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
cache:
image: redis
ports:
- 6379:6379
db:
image: mysql
ports:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: standardnotes
MYSQL_USER: standardnotes
MYSQL_PASSWORD: standardnotes
steps:
- uses: actions/checkout@v4
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install Dependencies
run: yarn install --immutable
- name: Build
run: yarn build
- name: Copy dotenv file
run: cp packages/home-server/.env.sample packages/home-server/.env
- name: Fill in env variables
run: |
sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
echo "REFRESH_TOKEN_AGE=10" >> packages/home-server/.env
echo "REVISIONS_FREQUENCY=2" >> packages/home-server/.env
echo "CONTENT_SIZE_TRANSFER_LIMIT=100000" >> packages/home-server/.env
echo "HTTP_REQUEST_PAYLOAD_LIMIT_MEGABYTES=1" >> packages/home-server/.env
echo "DB_HOST=localhost" >> packages/home-server/.env
echo "DB_PORT=3306" >> packages/home-server/.env
echo "DB_DATABASE=standardnotes" >> packages/home-server/.env
echo "DB_SQLITE_DATABASE_PATH=homeserver.db" >> packages/home-server/.env
echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
echo "FILES_SERVER_URL=http://localhost:3123" >> packages/home-server/.env
echo "E2E_TESTING=true" >> packages/home-server/.env
- name: Run Server
run: nohup yarn workspace @standardnotes/home-server start > logs/output.log 2>&1 &
env:
PORT: 3123
- name: Wait for server to start
run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}
- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: home-server-failure-logs-${{ inputs.suite }}-${{ matrix.db_type }}-${{ matrix.cache_type }}
retention-days: 5
path: |
logs/output.log

@@ -1,70 +0,0 @@
name: E2E Self Hosted Test Suite
on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
jobs:
e2e:
name: (Self Hosting) E2E Test Suite
strategy:
fail-fast: false
matrix:
service_proxy_type: [http, grpc]
runs-on: ubuntu-latest
services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
steps:
- uses: actions/checkout@v4
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install --immutable
- name: Run Server
run: docker compose -f docker-compose.ci.yml up -d
env:
DB_TYPE: mysql
CACHE_TYPE: redis
SERVICE_PROXY_TYPE: ${{ matrix.service_proxy_type }}
- name: Output Server Logs to File
run: docker compose -f docker-compose.ci.yml logs -f > logs/docker-compose.log 2>&1 &
env:
DB_TYPE: mysql
CACHE_TYPE: redis
SERVICE_PROXY_TYPE: ${{ matrix.service_proxy_type }}
- name: Wait for server to start
run: docker/is-available.sh http://localhost:3123 $(pwd)/logs
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}
- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: self-hosted-failure-logs-${{ inputs.suite }}
retention-days: 5
path: |
logs/*.err
logs/*.log

@@ -1,6 +1,4 @@
name: E2E Test Suite
run-name: E2E ${{ inputs.suite }} Test Suite against ${{ inputs.ref_name }} by ${{ inputs.author }}
name: E2E Test Suite On Self Hosted Server
on:
schedule:
@@ -11,18 +9,6 @@ on:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
author:
type: string
default: unknown
description: The author that triggered the workflow
ref_name:
type: string
default: unknown
description: The ref name from which the workflow was triggered
jobs:
e2e:
@@ -30,4 +16,4 @@ jobs:
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag || 'latest' }}
suite: ${{ inputs.suite || 'all' }}
secrets: inherit

@@ -19,3 +19,27 @@ jobs:
workspace_name: "@standardnotes/files-server"
package_path: packages/files
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -9,7 +9,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
@@ -26,7 +26,7 @@ jobs:
node-version-file: '.nvmrc'
- name: Install
run: yarn install
run: yarn install --immutable
- name: Build
run: yarn build
@@ -37,7 +37,7 @@ jobs:
needs: build
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
@@ -54,7 +54,7 @@ jobs:
node-version-file: '.nvmrc'
- name: Install
run: yarn install
run: yarn install --immutable
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
@@ -69,7 +69,7 @@ jobs:
needs: build
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
@@ -86,7 +86,7 @@ jobs:
node-version-file: '.nvmrc'
- name: Install
run: yarn install
run: yarn install --immutable
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
@@ -95,18 +95,9 @@ jobs:
- name: Test
run: yarn test
e2e-base:
e2e:
needs: build
name: E2E Base Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'base'
e2e-vaults:
needs: build
name: E2E Vaults Suite
name: E2E
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'vaults'
secrets: inherit

@@ -4,15 +4,12 @@ on:
push:
branches: [ main ]
permissions:
id-token: write
jobs:
build:
if: contains(github.event.head_commit.message, 'chore(release)') == false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
@@ -40,7 +37,7 @@ jobs:
needs: build
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
@@ -72,7 +69,7 @@ jobs:
needs: build
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
@@ -98,38 +95,26 @@ jobs:
- name: Test
run: yarn test
# e2e-base:
# needs: build
# name: E2E Base Suite
# uses: standardnotes/server/.github/workflows/common-e2e.yml@main
# with:
# snjs_image_tag: 'latest'
# suite: 'base'
# e2e-vaults:
# needs: build
# name: E2E Vaults Suite
# uses: standardnotes/server/.github/workflows/common-e2e.yml@main
# with:
# snjs_image_tag: 'latest'
# suite: 'vaults'
e2e:
needs: build
name: E2E
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
secrets: inherit
publish-self-hosting:
# needs: [ test, lint, e2e-base, e2e-vaults ]
needs: [ test, lint ]
needs: [ test, lint, e2e ]
name: Publish Self Hosting Docker Image
uses: standardnotes/server/.github/workflows/common-self-hosting.yml@main
secrets: inherit
publish-services:
# needs: [ test, lint, e2e-base, e2e-vaults ]
needs: [ test, lint ]
needs: [ test, lint, e2e ]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
token: ${{ secrets.CI_PAT_TOKEN }}
fetch-depth: 0
@@ -148,7 +133,7 @@ jobs:
git config --global user.email "ci@standardnotes.com"
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.PASSPHRASE }}

@@ -19,3 +19,27 @@ jobs:
workspace_name: "@standardnotes/revisions-server"
package_path: packages/revisions
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_REVISIONS_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_REVISIONS_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -20,3 +20,19 @@ jobs:
deploy_web: false
package_path: packages/scheduler
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SCHEDULER_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -19,3 +19,27 @@ jobs:
workspace_name: "@standardnotes/syncing-server"
package_path: packages/syncing-server
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SYNCING_SERVER_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SYNCING_SERVER_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

@@ -19,3 +19,27 @@ jobs:
workspace_name: "@standardnotes/websockets-server"
package_path: packages/websockets
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_WEBSOCKETS_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_WEBSOCKETS_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

.gitignore (vendored, 4 changes)

@@ -4,6 +4,7 @@
dist
coverage
.env
newrelic_agent.log
.yarn/*
!.yarn/cache
@@ -12,8 +13,7 @@ coverage
!.yarn/releases
!.yarn/sdks
!.yarn/versions
yarn.build-error.log
!.yarn/unplugged
packages/files/uploads/*
!packages/files/uploads/.gitkeep

.nvmrc (2 changes)

@@ -1 +1 @@
20.10.0
20.2.0

.pnp.cjs (generated, 9162 changes)

File diff suppressed because one or more lines are too long

.pnp.loader.mjs (generated, 223 changes)

@@ -1,9 +1,9 @@
import fs from 'fs';
import { URL as URL$1, fileURLToPath, pathToFileURL } from 'url';
import fs from 'fs';
import path from 'path';
import moduleExports, { Module } from 'module';
import { createHash } from 'crypto';
import { EOL } from 'os';
import moduleExports, { isBuiltin } from 'module';
import assert from 'assert';
const SAFE_TIME = 456789e3;
@@ -16,16 +16,14 @@ const PortablePath = {
const npath = Object.create(path);
const ppath = Object.create(path.posix);
npath.cwd = () => process.cwd();
ppath.cwd = process.platform === `win32` ? () => toPortablePath(process.cwd()) : process.cwd;
if (process.platform === `win32`) {
ppath.resolve = (...segments) => {
if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
return path.posix.resolve(...segments);
} else {
return path.posix.resolve(ppath.cwd(), ...segments);
}
};
}
ppath.cwd = () => toPortablePath(process.cwd());
ppath.resolve = (...segments) => {
if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
return path.posix.resolve(...segments);
} else {
return path.posix.resolve(ppath.cwd(), ...segments);
}
};
const contains = function(pathUtils, from, to) {
from = pathUtils.normalize(from);
to = pathUtils.normalize(to);
@@ -39,13 +37,17 @@ const contains = function(pathUtils, from, to) {
return null;
}
};
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
npath.contains = (from, to) => contains(npath, from, to);
ppath.contains = (from, to) => contains(ppath, from, to);
const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/;
const UNC_WINDOWS_PATH_REGEXP = /^\/\/(\.\/)?(.*)$/;
const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/;
const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/;
function fromPortablePathWin32(p) {
function fromPortablePath(p) {
if (process.platform !== `win32`)
return p;
let portablePathMatch, uncPortablePathMatch;
if (portablePathMatch = p.match(PORTABLE_PATH_REGEXP))
p = portablePathMatch[1];
@@ -55,7 +57,9 @@ function fromPortablePathWin32(p) {
return p;
return p.replace(/\//g, `\\`);
}
function toPortablePathWin32(p) {
function toPortablePath(p) {
if (process.platform !== `win32`)
return p;
p = p.replace(/\\/g, `/`);
let windowsPathMatch, uncWindowsPathMatch;
if (windowsPathMatch = p.match(WINDOWS_PATH_REGEXP))
@@ -64,10 +68,6 @@ function toPortablePathWin32(p) {
p = `/unc/${uncWindowsPathMatch[1] ? `.dot/` : ``}${uncWindowsPathMatch[2]}`;
return p;
}
const toPortablePath = process.platform === `win32` ? toPortablePathWin32 : (p) => p;
const fromPortablePath = process.platform === `win32` ? fromPortablePathWin32 : (p) => p;
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
function convertPath(targetPathUtils, sourcePath) {
return targetPathUtils === npath ? fromPortablePath(sourcePath) : toPortablePath(sourcePath);
}
@@ -89,6 +89,7 @@ async function copyPromise(destinationFs, destination, sourceFs, source, opts) {
}));
}
async function copyImpl(prelayout, postlayout, destinationFs, destination, sourceFs, source, opts) {
var _a, _b, _c;
const destinationStat = opts.didParentExist ? await maybeLStat(destinationFs, destination) : null;
const sourceStat = await sourceFs.lstatPromise(source);
const { atime, mtime } = opts.stableTime ? { atime: defaultTime, mtime: defaultTime } : sourceStat;
@@ -114,8 +115,8 @@ async function copyImpl(prelayout, postlayout, destinationFs, destination, sourc
throw new Error(`Unsupported file type (${sourceStat.mode})`);
}
}
if (opts.linkStrategy?.type !== `HardlinkFromIndex` || !sourceStat.isFile()) {
if (updated || destinationStat?.mtime?.getTime() !== mtime.getTime() || destinationStat?.atime?.getTime() !== atime.getTime()) {
if (((_a = opts.linkStrategy) == null ? void 0 : _a.type) !== `HardlinkFromIndex` || !sourceStat.isFile()) {
if (updated || ((_b = destinationStat == null ? void 0 : destinationStat.mtime) == null ? void 0 : _b.getTime()) !== mtime.getTime() || ((_c = destinationStat == null ? void 0 : destinationStat.atime) == null ? void 0 : _c.getTime()) !== atime.getTime()) {
postlayout.push(() => destinationFs.lutimesPromise(destination, atime, mtime));
updated = true;
}
@@ -185,7 +186,7 @@ async function copyFileViaIndex(prelayout, postlayout, destinationFs, destinatio
let indexStat = await maybeLStat(destinationFs, indexPath);
if (destinationStat) {
const isDestinationHardlinkedFromIndex = indexStat && destinationStat.dev === indexStat.dev && destinationStat.ino === indexStat.ino;
const isIndexModified = indexStat?.mtimeMs !== defaultTimeMs;
const isIndexModified = (indexStat == null ? void 0 : indexStat.mtimeMs) !== defaultTimeMs;
if (isDestinationHardlinkedFromIndex) {
if (isIndexModified && linkStrategy.autoRepair) {
atomicBehavior = 0 /* Lock */;
@@ -255,7 +256,8 @@ async function copyFileDirect(prelayout, postlayout, destinationFs, destination,
return true;
}
async function copyFile(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts) {
if (opts.linkStrategy?.type === `HardlinkFromIndex`) {
var _a;
if (((_a = opts.linkStrategy) == null ? void 0 : _a.type) === `HardlinkFromIndex`) {
return copyFileViaIndex(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts, opts.linkStrategy);
} else {
return copyFileDirect(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts);
@@ -385,7 +387,7 @@ class FakeFS {
throw error;
}
}
createdDirectory ??= subPath;
createdDirectory ?? (createdDirectory = subPath);
if (chmod != null)
await this.chmodPromise(subPath, chmod);
if (utimes != null) {
@@ -416,7 +418,7 @@ class FakeFS {
throw error;
}
}
createdDirectory ??= subPath;
createdDirectory ?? (createdDirectory = subPath);
if (chmod != null)
this.chmodSync(subPath, chmod);
if (utimes != null) {
@@ -611,14 +613,12 @@ class FakeFS {
throw error;
}
}
async writeJsonPromise(p, data, { compact = false } = {}) {
const space = compact ? 0 : 2;
return await this.writeFilePromise(p, `${JSON.stringify(data, null, space)}
async writeJsonPromise(p, data) {
return await this.writeFilePromise(p, `${JSON.stringify(data, null, 2)}
`);
}
writeJsonSync(p, data, { compact = false } = {}) {
const space = compact ? 0 : 2;
return this.writeFileSync(p, `${JSON.stringify(data, null, space)}
writeJsonSync(p, data) {
return this.writeFileSync(p, `${JSON.stringify(data, null, 2)}
`);
}
async preserveTimePromise(p, cb) {
@@ -852,7 +852,7 @@ class ProxiedFS extends FakeFS {
readFileSync(p, encoding) {
return this.baseFs.readFileSync(this.fsMapToBase(p), encoding);
}
readdirPromise(p, opts) {
async readdirPromise(p, opts) {
return this.baseFs.readdirPromise(this.mapToBase(p), opts);
}
readdirSync(p, opts) {
@@ -902,12 +902,6 @@ class ProxiedFS extends FakeFS {
}
}
function direntToPortable(dirent) {
const portableDirent = dirent;
if (typeof dirent.path === `string`)
portableDirent.path = npath.toPortablePath(dirent.path);
return portableDirent;
}
class NodeFS extends BasePortableFakeFS {
constructor(realFs = fs) {
super();
@@ -938,24 +932,12 @@ class NodeFS extends BasePortableFakeFS {
this.realFs.opendir(npath.fromPortablePath(p), this.makeCallback(resolve, reject));
}
}).then((dir) => {
const dirWithFixedPath = dir;
Object.defineProperty(dirWithFixedPath, `path`, {
value: p,
configurable: true,
writable: true
});
return dirWithFixedPath;
return Object.defineProperty(dir, `path`, { value: p, configurable: true, writable: true });
});
}
opendirSync(p, opts) {
const dir = typeof opts !== `undefined` ? this.realFs.opendirSync(npath.fromPortablePath(p), opts) : this.realFs.opendirSync(npath.fromPortablePath(p));
const dirWithFixedPath = dir;
Object.defineProperty(dirWithFixedPath, `path`, {
value: p,
configurable: true,
writable: true
});
return dirWithFixedPath;
return Object.defineProperty(dir, `path`, { value: p, configurable: true, writable: true });
}
async readPromise(fd, buffer, offset = 0, length = 0, position = -1) {
return await new Promise((resolve, reject) => {
@@ -1233,32 +1215,16 @@
}
async readdirPromise(p, opts) {
return await new Promise((resolve, reject) => {
if (opts) {
if (opts.recursive && process.platform === `win32`) {
if (opts.withFileTypes) {
this.realFs.readdir(npath.fromPortablePath(p), opts, this.makeCallback((results) => resolve(results.map(direntToPortable)), reject));
} else {
this.realFs.readdir(npath.fromPortablePath(p), opts, this.makeCallback((results) => resolve(results.map(npath.toPortablePath)), reject));
}
} else {
this.realFs.readdir(npath.fromPortablePath(p), opts, this.makeCallback(resolve, reject));
}
if (opts == null ? void 0 : opts.withFileTypes) {
this.realFs.readdir(npath.fromPortablePath(p), { withFileTypes: true }, this.makeCallback(resolve, reject));
} else {
this.realFs.readdir(npath.fromPortablePath(p), this.makeCallback(resolve, reject));
this.realFs.readdir(npath.fromPortablePath(p), this.makeCallback((value) => resolve(value), reject));
}
});
}
readdirSync(p, opts) {
if (opts) {
if (opts.recursive && process.platform === `win32`) {
if (opts.withFileTypes) {
return this.realFs.readdirSync(npath.fromPortablePath(p), opts).map(direntToPortable);
} else {
return this.realFs.readdirSync(npath.fromPortablePath(p), opts).map(npath.toPortablePath);
}
} else {
return this.realFs.readdirSync(npath.fromPortablePath(p), opts);
}
if (opts == null ? void 0 : opts.withFileTypes) {
return this.realFs.readdirSync(npath.fromPortablePath(p), { withFileTypes: true });
} else {
return this.realFs.readdirSync(npath.fromPortablePath(p));
}
@@ -1393,9 +1359,14 @@ class VirtualFS extends ProxiedFS {
}
const [major, minor] = process.versions.node.split(`.`).map((value) => parseInt(value, 10));
const HAS_CONSOLIDATED_HOOKS = major > 16 || major === 16 && minor >= 12;
const HAS_UNFLAGGED_JSON_MODULES = major > 17 || major === 17 && minor >= 5 || major === 16 && minor >= 15;
const HAS_JSON_IMPORT_ASSERTION_REQUIREMENT = major > 17 || major === 17 && minor >= 1 || major === 16 && minor > 14;
const WATCH_MODE_MESSAGE_USES_ARRAYS = major > 19 || major === 19 && minor >= 2 || major === 18 && minor >= 13;
const HAS_LAZY_LOADED_TRANSLATORS = major === 20 && minor < 6 || major === 19 && minor >= 3;
const HAS_LAZY_LOADED_TRANSLATORS = major > 19 || major === 19 && minor >= 3;
const builtinModules = new Set(Module.builtinModules || Object.keys(process.binding(`natives`)));
const isBuiltinModule = (request) => request.startsWith(`node:`) || builtinModules.has(request);
function readPackageScope(checkPath) {
const rootSeparatorIndex = checkPath.indexOf(npath.sep);
let separatorIndex;
@@ -1456,7 +1427,11 @@ function getFileFormat(filepath) {
);
}
case `.json`: {
return `json`;
if (HAS_UNFLAGGED_JSON_MODULES)
return `json`;
throw new Error(
`Unknown file extension ".json" for ${filepath}`
);
}
case `.js`: {
const pkg = readPackageScope(filepath);
@@ -1477,15 +1452,38 @@
}
}
async function load$1(urlString, context, nextLoad) {
async function getFormat$1(resolved, context, defaultGetFormat) {
const url = tryParseURL(resolved);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetFormat(resolved, context, defaultGetFormat);
const format = getFileFormat(fileURLToPath(url));
if (format) {
return {
format
};
}
return defaultGetFormat(resolved, context, defaultGetFormat);
}
async function getSource$1(urlString, context, defaultGetSource) {
const url = tryParseURL(urlString);
if (url?.protocol !== `file:`)
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetSource(urlString, context, defaultGetSource);
return {
source: await fs.promises.readFile(fileURLToPath(url), `utf8`)
};
}
async function load$1(urlString, context, nextLoad) {
var _a;
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return nextLoad(urlString, context, nextLoad);
const filePath = fileURLToPath(url);
const format = getFileFormat(filePath);
if (!format)
return nextLoad(urlString, context, nextLoad);
if (format === `json` && context.importAssertions?.type !== `json`) {
if (HAS_JSON_IMPORT_ASSERTION_REQUIREMENT && format === `json` && ((_a = context.importAssertions) == null ? void 0 : _a.type) !== `json`) {
const err = new TypeError(`[ERR_IMPORT_ASSERTION_TYPE_MISSING]: Module "${urlString}" needs an import assertion of type "json"`);
err.code = `ERR_IMPORT_ASSERTION_TYPE_MISSING`;
throw err;
@@ -1502,7 +1500,7 @@ async function load$1(urlString, context, nextLoad) {
}
return {
format,
source: format === `commonjs` ? void 0 : await fs.promises.readFile(filePath, `utf8`),
source: await fs.promises.readFile(filePath, `utf8`),
shortCircuit: true
};
}
@@ -1983,7 +1981,7 @@ async function resolvePrivateRequest(specifier, issuer, context, nextResolve) {
}
async function resolve$1(originalSpecifier, context, nextResolve) {
const { findPnpApi } = moduleExports;
if (!findPnpApi || isBuiltin(originalSpecifier))
if (!findPnpApi || isBuiltinModule(originalSpecifier))
return nextResolve(originalSpecifier, context, nextResolve);
let specifier = originalSpecifier;
const url = tryParseURL(specifier, isRelativeRegexp.test(specifier) ? context.parentURL : void 0);
@@ -1993,7 +1991,7 @@ async function resolve$1(originalSpecifier, context, nextResolve) {
specifier = fileURLToPath(url);
}
const { parentURL, conditions = [] } = context;
const issuer = parentURL && tryParseURL(parentURL)?.protocol === `file:` ? fileURLToPath(parentURL) : process.cwd();
const issuer = parentURL ? fileURLToPath(parentURL) : process.cwd();
const pnpapi = findPnpApi(issuer) ?? (url ? findPnpApi(specifier) : null);
if (!pnpapi)
return nextResolve(originalSpecifier, context, nextResolve);
@@ -2042,49 +2040,36 @@
if (!HAS_LAZY_LOADED_TRANSLATORS) {
const binding = process.binding(`fs`);
const originalReadFile = binding.readFileUtf8 || binding.readFileSync;
if (originalReadFile) {
binding[originalReadFile.name] = function(...args) {
const originalfstat = binding.fstat;
const ZIP_MASK = 4278190080;
const ZIP_MAGIC = 704643072;
binding.fstat = function(...args) {
const [fd, useBigint, req] = args;
if ((fd & ZIP_MASK) === ZIP_MAGIC && useBigint === false && req === void 0) {
try {
return fs.readFileSync(args[0], {
encoding: `utf8`,
flag: args[1]
});
const stats = fs.fstatSync(fd);
return new Float64Array([
stats.dev,
stats.mode,
stats.nlink,
stats.uid,
stats.gid,
stats.rdev,
stats.blksize,
stats.ino,
stats.size,
stats.blocks
]);
} catch {
}
return originalReadFile.apply(this, args);
};
} else {
const binding2 = process.binding(`fs`);
const originalfstat = binding2.fstat;
const ZIP_MASK = 4278190080;
const ZIP_MAGIC = 704643072;
binding2.fstat = function(...args) {
const [fd, useBigint, req] = args;
if ((fd & ZIP_MASK) === ZIP_MAGIC && useBigint === false && req === void 0) {
try {
const stats = fs.fstatSync(fd);
return new Float64Array([
stats.dev,
stats.mode,
stats.nlink,
stats.uid,
stats.gid,
stats.rdev,
stats.blksize,
stats.ino,
stats.size,
stats.blocks
]);
} catch {
}
}
return originalfstat.apply(this, args);
};
}
}
return originalfstat.apply(this, args);
};
}
const resolve = resolve$1;
const load = load$1;
const getFormat = HAS_CONSOLIDATED_HOOKS ? void 0 : getFormat$1;
const getSource = HAS_CONSOLIDATED_HOOKS ? void 0 : getSource$1;
const load = HAS_CONSOLIDATED_HOOKS ? load$1 : void 0;
export { load, resolve };
export { getFormat, getSource, load, resolve };

3 binary files not shown.

Some files were not shown because too many files have changed in this diff.