chore: remove legacy system-api folder
This commit is contained in:
parent
f1c295e84d
commit
88e0ccd7b9
42 changed files with 0 additions and 1751 deletions
|
@ -1,4 +0,0 @@
|
|||
node_modules/
|
||||
dist/
|
||||
sessions/
|
||||
logs/
|
|
@ -1,11 +0,0 @@
|
|||
ROOT_FOLDER=/test
|
||||
ROOT_FOLDER_HOST=/tipi
|
||||
JWT_SECRET=secret
|
||||
POSTGRES_DBNAME=postgres
|
||||
POSTGRES_HOST=localhost
|
||||
POSTGRES_USERNAME=postgres
|
||||
POSTGRES_PASSWORD=postgres
|
||||
APPS_REPO_ID=repo-id
|
||||
APPS_REPO_URL=http://test.com
|
||||
INTERNAL_IP=localhost
|
||||
STORAGE_PATH=/tipi/test
|
|
@ -1,3 +0,0 @@
|
|||
node_modules/
|
||||
dist/
|
||||
*.cjs
|
|
@ -1,27 +0,0 @@
|
|||
module.exports = {
|
||||
plugins: ['@typescript-eslint', 'import', 'react'],
|
||||
extends: ['airbnb-base', 'airbnb-typescript/base', 'eslint:recommended', 'plugin:import/typescript', 'plugin:@typescript-eslint/recommended', 'prettier'],
|
||||
parser: '@typescript-eslint/parser',
|
||||
parserOptions: {
|
||||
project: './tsconfig.json',
|
||||
tsconfigRootDir: __dirname,
|
||||
ecmaVersion: 'latest',
|
||||
sourceType: 'module',
|
||||
},
|
||||
rules: {
|
||||
'max-len': [1, { code: 200 }],
|
||||
'import/extensions': ['error', 'ignorePackages', { js: 'never', jsx: 'never', ts: 'never', tsx: 'never' }],
|
||||
'no-unused-vars': [1, { argsIgnorePattern: '^_' }],
|
||||
'@typescript-eslint/no-unused-vars': [1, { argsIgnorePattern: '^_' }],
|
||||
'max-classes-per-file': 0,
|
||||
'class-methods-use-this': 0,
|
||||
'import/prefer-default-export': 0,
|
||||
'no-underscore-dangle': 0,
|
||||
'@typescript-eslint/ban-ts-comment': 0,
|
||||
'import/no-extraneous-dependencies': ['error', { devDependencies: ['**/*.test.ts', '**/*.spec.ts', '**/*.factory.ts', 'esbuild.js'] }],
|
||||
},
|
||||
globals: {
|
||||
NodeJS: true,
|
||||
},
|
||||
env: { node: true, jest: true },
|
||||
};
|
10
packages/system-api/.gitignore
vendored
10
packages/system-api/.gitignore
vendored
|
@ -1,10 +0,0 @@
|
|||
node_modules/
|
||||
dist/
|
||||
|
||||
.DS_Store
|
||||
|
||||
# testing
|
||||
coverage/
|
||||
logs/
|
||||
sessions/
|
||||
.vscode
|
|
@ -1,6 +0,0 @@
|
|||
module.exports = {
|
||||
singleQuote: true,
|
||||
semi: true,
|
||||
trailingComma: 'all',
|
||||
printWidth: 200,
|
||||
};
|
|
@ -1,9 +0,0 @@
|
|||
const childProcess: { execFile: typeof execFile } = jest.genMockFromModule('child_process');
|
||||
|
||||
const execFile = (_path: string, _args: string[], _thing: unknown, callback: () => void) => {
|
||||
callback();
|
||||
};
|
||||
|
||||
childProcess.execFile = execFile;
|
||||
|
||||
module.exports = childProcess;
|
|
@ -1,105 +0,0 @@
|
|||
import path from 'path';
|
||||
|
||||
class FsMock {
|
||||
private static instance: FsMock;
|
||||
|
||||
private mockFiles = Object.create(null);
|
||||
|
||||
static getInstance(): FsMock {
|
||||
if (!FsMock.instance) {
|
||||
FsMock.instance = new FsMock();
|
||||
}
|
||||
return FsMock.instance;
|
||||
}
|
||||
|
||||
__createMockFiles = (newMockFiles: Record<string, string>) => {
|
||||
this.mockFiles = Object.create(null);
|
||||
|
||||
// Create folder tree
|
||||
Object.keys(newMockFiles).forEach((file) => {
|
||||
const dir = path.dirname(file);
|
||||
|
||||
if (!this.mockFiles[dir]) {
|
||||
this.mockFiles[dir] = [];
|
||||
}
|
||||
|
||||
this.mockFiles[dir].push(path.basename(file));
|
||||
this.mockFiles[file] = newMockFiles[file];
|
||||
});
|
||||
};
|
||||
|
||||
__resetAllMocks = () => {
|
||||
this.mockFiles = Object.create(null);
|
||||
};
|
||||
|
||||
readFileSync = (p: string) => this.mockFiles[p];
|
||||
|
||||
existsSync = (p: string) => this.mockFiles[p] !== undefined;
|
||||
|
||||
writeFileSync = (p: string, data: string | string[]) => {
|
||||
this.mockFiles[p] = data;
|
||||
};
|
||||
|
||||
mkdirSync = (p: string) => {
|
||||
this.mockFiles[p] = Object.create(null);
|
||||
};
|
||||
|
||||
rmSync = (p: string) => {
|
||||
if (this.mockFiles[p] instanceof Array) {
|
||||
this.mockFiles[p].forEach((file: string) => {
|
||||
delete this.mockFiles[path.join(p, file)];
|
||||
});
|
||||
}
|
||||
|
||||
delete this.mockFiles[p];
|
||||
};
|
||||
|
||||
readdirSync = (p: string) => {
|
||||
const files: string[] = [];
|
||||
|
||||
const depth = p.split('/').length;
|
||||
|
||||
Object.keys(this.mockFiles).forEach((file) => {
|
||||
if (file.startsWith(p)) {
|
||||
const fileDepth = file.split('/').length;
|
||||
|
||||
if (fileDepth === depth + 1) {
|
||||
files.push(file.split('/').pop() || '');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return files;
|
||||
};
|
||||
|
||||
copyFileSync = (source: string, destination: string) => {
|
||||
this.mockFiles[destination] = this.mockFiles[source];
|
||||
};
|
||||
|
||||
copySync = (source: string, destination: string) => {
|
||||
this.mockFiles[destination] = this.mockFiles[source];
|
||||
|
||||
if (this.mockFiles[source] instanceof Array) {
|
||||
this.mockFiles[source].forEach((file: string) => {
|
||||
this.mockFiles[`${destination}/${file}`] = this.mockFiles[`${source}/${file}`];
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
createFileSync = (p: string) => {
|
||||
this.mockFiles[p] = '';
|
||||
};
|
||||
|
||||
unlinkSync = (p: string) => {
|
||||
if (this.mockFiles[p] instanceof Array) {
|
||||
this.mockFiles[p].forEach((file: string) => {
|
||||
delete this.mockFiles[path.join(p, file)];
|
||||
});
|
||||
}
|
||||
delete this.mockFiles[p];
|
||||
};
|
||||
|
||||
getMockFiles = () => this.mockFiles;
|
||||
}
|
||||
|
||||
export default FsMock.getInstance();
|
|
@ -1,11 +0,0 @@
|
|||
const cron: {
|
||||
schedule: typeof schedule;
|
||||
} = jest.genMockFromModule('node-cron');
|
||||
|
||||
const schedule = (scd: string, cb: () => void) => {
|
||||
cb();
|
||||
};
|
||||
|
||||
cron.schedule = schedule;
|
||||
|
||||
module.exports = cron;
|
|
@ -1,18 +0,0 @@
|
|||
module.exports = {
|
||||
createClient: jest.fn(() => {
|
||||
const values = new Map();
|
||||
const expirations = new Map();
|
||||
return {
|
||||
isOpen: true,
|
||||
connect: jest.fn(),
|
||||
set: (key: string, value: string, exp: number) => {
|
||||
values.set(key, value);
|
||||
expirations.set(key, exp);
|
||||
},
|
||||
get: (key: string) => values.get(key),
|
||||
quit: jest.fn(),
|
||||
del: (key: string) => values.delete(key),
|
||||
ttl: (key: string) => expirations.get(key),
|
||||
};
|
||||
}),
|
||||
};
|
|
@ -1,70 +0,0 @@
|
|||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
const esbuild = require('esbuild');
|
||||
const path = require('path');
|
||||
|
||||
const commandArgs = process.argv.slice(2);
|
||||
|
||||
const nativeNodeModulesPlugin = () => ({
|
||||
name: 'native-node-modules',
|
||||
setup(build) {
|
||||
// If a ".node" file is imported within a module in the "file" namespace, resolve
|
||||
// it to an absolute path and put it into the "node-file" virtual namespace.
|
||||
build.onResolve({ filter: /\.node$/, namespace: 'file' }, (args) => {
|
||||
const resolvedId = require.resolve(args.path, {
|
||||
paths: [args.resolveDir],
|
||||
});
|
||||
if (resolvedId.endsWith('.node')) {
|
||||
return {
|
||||
path: resolvedId,
|
||||
namespace: 'node-file',
|
||||
};
|
||||
}
|
||||
return {
|
||||
path: resolvedId,
|
||||
};
|
||||
});
|
||||
|
||||
// Files in the "node-file" virtual namespace call "require()" on the
|
||||
// path from esbuild of the ".node" file in the output directory.
|
||||
build.onLoad({ filter: /.*/, namespace: 'node-file' }, (args) => ({
|
||||
contents: `
|
||||
import path from ${JSON.stringify(args.path)}
|
||||
try { module.exports = require(path) }
|
||||
catch {}
|
||||
`,
|
||||
resolveDir: path.dirname(args.path),
|
||||
}));
|
||||
|
||||
// If a ".node" file is imported within a module in the "node-file" namespace, put
|
||||
// it in the "file" namespace where esbuild's default loading behavior will handle
|
||||
// it. It is already an absolute path since we resolved it to one above.
|
||||
build.onResolve({ filter: /\.node$/, namespace: 'node-file' }, (args) => ({
|
||||
path: args.path,
|
||||
namespace: 'file',
|
||||
}));
|
||||
|
||||
// Tell esbuild's default loading behavior to use the "file" loader for
|
||||
// these ".node" files.
|
||||
const opts = build.initialOptions;
|
||||
opts.loader = opts.loader || {};
|
||||
opts.loader['.node'] = 'file';
|
||||
},
|
||||
});
|
||||
|
||||
/* Bundle server */
|
||||
esbuild.build({
|
||||
entryPoints: ['./src/server.ts'],
|
||||
bundle: true,
|
||||
platform: 'node',
|
||||
target: 'node18',
|
||||
external: ['pg-native'],
|
||||
sourcemap: commandArgs.includes('--sourcemap'),
|
||||
watch: commandArgs.includes('--watch'),
|
||||
outfile: 'dist/server.bundle.js',
|
||||
plugins: [nativeNodeModulesPlugin()],
|
||||
logLevel: 'info',
|
||||
minifySyntax: true,
|
||||
minifyWhitespace: true,
|
||||
});
|
||||
|
||||
const glob = require('glob');
|
|
@ -1,21 +0,0 @@
|
|||
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
verbose: true,
|
||||
testEnvironment: 'node',
|
||||
testMatch: ['**/__tests__/**/*.test.ts'],
|
||||
setupFiles: ['<rootDir>/src/test/dotenv-config.ts'],
|
||||
setupFilesAfterEnv: ['<rootDir>/src/test/jest-setup.ts'],
|
||||
collectCoverage: true,
|
||||
collectCoverageFrom: ['src/**/*.{ts,tsx}', '!src/**/migrations/**/*.{ts,tsx}', '!**/src/config/**/*.{ts,tsx}', '!**/__tests__/**'],
|
||||
passWithNoTests: true,
|
||||
transform: {
|
||||
'^.+\\.graphql$': 'graphql-import-node/jest',
|
||||
},
|
||||
globals: {
|
||||
// NODE_ENV: 'test',
|
||||
'ts-jest': {
|
||||
isolatedModules: true,
|
||||
},
|
||||
},
|
||||
};
|
|
@ -1,13 +0,0 @@
|
|||
import { DataSource } from 'typeorm';
|
||||
|
||||
export const connectionSource = new DataSource({
|
||||
type: 'postgres',
|
||||
host: 'localhost',
|
||||
port: 5432,
|
||||
username: 'tipi',
|
||||
password: 'postgres',
|
||||
database: 'tipi',
|
||||
logging: true,
|
||||
synchronize: false,
|
||||
entities: ['src/modules/**/*.entity.ts'],
|
||||
});
|
|
@ -1,84 +0,0 @@
|
|||
{
|
||||
"name": "system-api",
|
||||
"version": "0.8.1",
|
||||
"description": "",
|
||||
"exports": "./dist/server.bundle.js",
|
||||
"engines": {
|
||||
"node": ">=14.16"
|
||||
},
|
||||
"scripts": {
|
||||
"clean": "rimraf dist",
|
||||
"lint": "eslint . --ext .ts",
|
||||
"lint:fix": "eslint . --ext .ts --fix",
|
||||
"test": "jest --colors",
|
||||
"test:watch": "jest --watch",
|
||||
"build": "rm -rf dist && node esbuild.js",
|
||||
"build:watch": "node esbuild.js --sourcemap --watch",
|
||||
"start:dev": "NODE_ENV=development && nodemon --watch dist dist/server.bundle.js",
|
||||
"dev": "concurrently \"npm run build:watch\" \"npm run start:dev\"",
|
||||
"start": "NODE_ENV=production && node dist/server.bundle.js",
|
||||
"start:test": "NODE_ENV=test && node dist/server.bundle.js",
|
||||
"typeorm": "typeorm-ts-node-commonjs -d ./ormconfig.ts",
|
||||
"migration:generate": "npm run typeorm migration:generate"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"apollo-server-core": "^3.10.0",
|
||||
"apollo-server-express": "^3.9.0",
|
||||
"argon2": "^0.29.1",
|
||||
"axios": "^0.26.1",
|
||||
"class-validator": "^0.13.2",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.0.0",
|
||||
"express": "^4.17.3",
|
||||
"fs-extra": "^10.1.0",
|
||||
"graphql": "^15.3.0",
|
||||
"graphql-type-json": "^0.3.2",
|
||||
"http": "0.0.1-security",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"node-cron": "^3.0.1",
|
||||
"pg": "^8.7.3",
|
||||
"redis": "^4.3.1",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"semver": "^7.3.7",
|
||||
"type-graphql": "^1.1.1",
|
||||
"typeorm": "^0.3.11",
|
||||
"uuid": "^9.0.0",
|
||||
"validator": "^13.7.0",
|
||||
"winston": "^3.7.2",
|
||||
"zod": "^3.19.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@faker-js/faker": "^7.3.0",
|
||||
"@types/cors": "^2.8.12",
|
||||
"@types/express": "^4.17.13",
|
||||
"@types/fs-extra": "^9.0.13",
|
||||
"@types/jest": "^27.5.0",
|
||||
"@types/jsonwebtoken": "^8.5.9",
|
||||
"@types/node": "17.0.31",
|
||||
"@types/node-cron": "^3.0.2",
|
||||
"@types/pg": "^8.6.5",
|
||||
"@types/semver": "^7.3.12",
|
||||
"@types/uuid": "^8.3.4",
|
||||
"@types/validator": "^13.7.2",
|
||||
"@typescript-eslint/eslint-plugin": "^5.18.0",
|
||||
"@typescript-eslint/parser": "^5.22.0",
|
||||
"concurrently": "^7.1.0",
|
||||
"esbuild": "^0.16.6",
|
||||
"eslint": "^8.13.0",
|
||||
"eslint-config-airbnb-typescript": "^17.0.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"eslint-plugin-prettier": "^4.0.0",
|
||||
"glob": "^8.0.3",
|
||||
"graphql-import-node": "^0.0.5",
|
||||
"jest": "^28.1.0",
|
||||
"nodemon": "^2.0.15",
|
||||
"prettier": "2.6.2",
|
||||
"rimraf": "^3.0.2",
|
||||
"ts-jest": "^28.0.2",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "4.6.4"
|
||||
}
|
||||
}
|
|
@ -1,61 +0,0 @@
|
|||
import { createClient, RedisClientType } from 'redis';
|
||||
import { getConfig } from '../core/config/TipiConfig';
|
||||
|
||||
const ONE_DAY_IN_SECONDS = 60 * 60 * 24;
|
||||
|
||||
class TipiCache {
|
||||
private static instance: TipiCache;
|
||||
|
||||
private client: RedisClientType;
|
||||
|
||||
constructor() {
|
||||
const client = createClient({
|
||||
url: `redis://${getConfig().REDIS_HOST}:6379`,
|
||||
});
|
||||
|
||||
this.client = client as RedisClientType;
|
||||
}
|
||||
|
||||
public static getInstance(): TipiCache {
|
||||
if (!TipiCache.instance) {
|
||||
TipiCache.instance = new TipiCache();
|
||||
}
|
||||
|
||||
return TipiCache.instance;
|
||||
}
|
||||
|
||||
private async getClient(): Promise<RedisClientType> {
|
||||
if (!this.client.isOpen) {
|
||||
await this.client.connect();
|
||||
}
|
||||
return this.client;
|
||||
}
|
||||
|
||||
public async set(key: string, value: string, expiration = ONE_DAY_IN_SECONDS) {
|
||||
const client = await this.getClient();
|
||||
return client.set(key, value, {
|
||||
EX: expiration,
|
||||
});
|
||||
}
|
||||
|
||||
public async get(key: string) {
|
||||
const client = await this.getClient();
|
||||
return client.get(key);
|
||||
}
|
||||
|
||||
public async del(key: string) {
|
||||
const client = await this.getClient();
|
||||
return client.del(key);
|
||||
}
|
||||
|
||||
public async close() {
|
||||
return this.client.quit();
|
||||
}
|
||||
|
||||
public async ttl(key: string) {
|
||||
const client = await this.getClient();
|
||||
return client.ttl(key);
|
||||
}
|
||||
}
|
||||
|
||||
export default TipiCache.getInstance();
|
|
@ -1,6 +0,0 @@
|
|||
/* eslint-disable @typescript-eslint/naming-convention */
|
||||
const __prod__ = process.env.NODE_ENV === 'production';
|
||||
|
||||
const COOKIE_MAX_AGE = 1000 * 60 * 60 * 24 * 365 * 10;
|
||||
|
||||
export { __prod__, COOKIE_MAX_AGE };
|
|
@ -1,16 +0,0 @@
|
|||
/* eslint-disable require-await */
|
||||
import { PluginDefinition } from 'apollo-server-core';
|
||||
import { __prod__ } from '../constants/constants';
|
||||
import logger from './logger';
|
||||
|
||||
const ApolloLogs: PluginDefinition = {
|
||||
requestDidStart: async () => ({
|
||||
async didEncounterErrors(errors) {
|
||||
if (!__prod__) {
|
||||
logger.error(JSON.stringify(errors.errors));
|
||||
}
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
export { ApolloLogs };
|
|
@ -1,62 +0,0 @@
|
|||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import { createLogger, format, transports } from 'winston';
|
||||
import { getConfig } from '../../core/config/TipiConfig';
|
||||
|
||||
const { align, printf, timestamp, combine, colorize } = format;
|
||||
|
||||
/**
|
||||
* Production logger format
|
||||
*/
|
||||
const combinedLogFormat = combine(
|
||||
timestamp(),
|
||||
printf((info) => `${info.timestamp} > ${info.message}`),
|
||||
);
|
||||
|
||||
/**
|
||||
* Development logger format
|
||||
*/
|
||||
const combinedLogFormatDev = combine(
|
||||
colorize(),
|
||||
align(),
|
||||
printf((info) => `${info.level}: ${info.message}`),
|
||||
);
|
||||
|
||||
const productionLogger = () => {
|
||||
if (!fs.existsSync(getConfig().logs.LOGS_FOLDER)) {
|
||||
fs.mkdirSync(getConfig().logs.LOGS_FOLDER);
|
||||
}
|
||||
return createLogger({
|
||||
level: 'info',
|
||||
format: combinedLogFormat,
|
||||
transports: [
|
||||
//
|
||||
// - Write to all logs with level `info` and below to `app.log`
|
||||
// - Write all logs error (and below) to `error.log`.
|
||||
//
|
||||
new transports.File({
|
||||
filename: path.join(getConfig().logs.LOGS_FOLDER, getConfig().logs.LOGS_ERROR),
|
||||
level: 'error',
|
||||
}),
|
||||
new transports.File({
|
||||
filename: path.join(getConfig().logs.LOGS_FOLDER, getConfig().logs.LOGS_APP),
|
||||
}),
|
||||
],
|
||||
exceptionHandlers: [new transports.File({ filename: path.join(getConfig().logs.LOGS_FOLDER, getConfig().logs.LOGS_ERROR) })],
|
||||
});
|
||||
};
|
||||
|
||||
//
|
||||
// If we're not in production then log to the `console
|
||||
//
|
||||
const LoggerDev = createLogger({
|
||||
level: 'debug',
|
||||
format: combinedLogFormatDev,
|
||||
transports: [
|
||||
new transports.Console({
|
||||
level: 'debug',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
export default process.env.NODE_ENV === 'production' ? productionLogger() : LoggerDev;
|
|
@ -1,28 +0,0 @@
|
|||
/* eslint-disable max-len */
|
||||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class Initial1657299198975 implements MigrationInterface {
|
||||
name = 'Initial1657299198975';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('CREATE TYPE "public"."update_status_enum" AS ENUM(\'FAILED\', \'SUCCESS\')');
|
||||
await queryRunner.query(
|
||||
'CREATE TABLE "update" ("id" SERIAL NOT NULL, "name" character varying NOT NULL, "status" "public"."update_status_enum" NOT NULL, "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), CONSTRAINT "UQ_6e7d7ecccdc972caa0ad33cb014" UNIQUE ("name"), CONSTRAINT "PK_575f77a0576d6293bc1cb752847" PRIMARY KEY ("id"))',
|
||||
);
|
||||
await queryRunner.query(
|
||||
'CREATE TABLE "user" ("id" SERIAL NOT NULL, "username" character varying NOT NULL, "password" character varying NOT NULL, "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), CONSTRAINT "UQ_78a916df40e02a9deb1c4b75edb" UNIQUE ("username"), CONSTRAINT "PK_cace4a159ff9f2512dd42373760" PRIMARY KEY ("id"))',
|
||||
);
|
||||
await queryRunner.query("CREATE TYPE \"public\".\"app_status_enum\" AS ENUM('running', 'stopped', 'installing', 'uninstalling', 'stopping', 'starting', 'missing')");
|
||||
await queryRunner.query(
|
||||
'CREATE TABLE "app" ("id" character varying NOT NULL, "status" "public"."app_status_enum" NOT NULL DEFAULT \'stopped\', "lastOpened" TIMESTAMP WITH TIME ZONE DEFAULT now(), "numOpened" integer NOT NULL DEFAULT \'0\', "config" jsonb NOT NULL, "createdAt" TIMESTAMP NOT NULL DEFAULT now(), "updatedAt" TIMESTAMP NOT NULL DEFAULT now(), CONSTRAINT "UQ_9478629fc093d229df09e560aea" UNIQUE ("id"), CONSTRAINT "PK_9478629fc093d229df09e560aea" PRIMARY KEY ("id"))',
|
||||
);
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('DROP TABLE "app"');
|
||||
await queryRunner.query('DROP TYPE "public"."app_status_enum"');
|
||||
await queryRunner.query('DROP TABLE "user"');
|
||||
await queryRunner.query('DROP TABLE "update"');
|
||||
await queryRunner.query('DROP TYPE "public"."update_status_enum"');
|
||||
}
|
||||
}
|
|
@ -1,20 +0,0 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class AppVersion1659645508713 implements MigrationInterface {
|
||||
name = 'AppVersion1659645508713';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('ALTER TABLE "app" ADD "version" integer DEFAULT \'0\'');
|
||||
// populate all apps with a version
|
||||
await queryRunner.query('UPDATE "app" SET "version" = \'1\'');
|
||||
// add NOT NULL constraint
|
||||
await queryRunner.query('ALTER TABLE "app" ALTER COLUMN "version" SET NOT NULL');
|
||||
|
||||
await queryRunner.query('ALTER TABLE "app" ADD CONSTRAINT "UQ_9478629fc093d229df09e560aea" UNIQUE ("id")');
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('ALTER TABLE "app" DROP CONSTRAINT "UQ_9478629fc093d229df09e560aea"');
|
||||
await queryRunner.query('ALTER TABLE "app" DROP COLUMN "version"');
|
||||
}
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class AppStatusUpdating1660071627328 implements MigrationInterface {
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('ALTER TYPE "public"."app_status_enum" ADD VALUE \'updating\'');
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('ALTER TYPE "public"."app_status_enum" DROP VALUE \'updating\'');
|
||||
}
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
|
||||
export class AppExposedDomain1662036689477 implements MigrationInterface {
|
||||
name = 'AppExposedDomain1662036689477';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('ALTER TABLE "app" ADD "exposed" boolean DEFAULT false');
|
||||
// populate all apps with exposed to false
|
||||
await queryRunner.query('UPDATE "app" SET "exposed" = false');
|
||||
// add NOT NULL constraint
|
||||
await queryRunner.query('ALTER TABLE "app" ALTER COLUMN "exposed" SET NOT NULL');
|
||||
|
||||
await queryRunner.query('ALTER TABLE "app" ADD "domain" character varying');
|
||||
await queryRunner.query('ALTER TABLE "app" ALTER COLUMN "version" SET DEFAULT \'1\'');
|
||||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
await queryRunner.query('ALTER TABLE "app" ALTER COLUMN "version" SET DEFAULT \'0\'');
|
||||
await queryRunner.query('ALTER TABLE "app" DROP COLUMN "domain"');
|
||||
await queryRunner.query('ALTER TABLE "app" DROP COLUMN "exposed"');
|
||||
}
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
export type Maybe<T> = T | null | undefined;
|
||||
|
||||
export interface IUser {
|
||||
email: string;
|
||||
name: string;
|
||||
password: string;
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
import { getConfig } from '../core/config/TipiConfig';
|
||||
|
||||
export const APP_DATA_FOLDER = 'app-data';
|
||||
export const APPS_FOLDER = 'apps';
|
||||
export const isProd = getConfig().NODE_ENV === 'production';
|
|
@ -1,227 +0,0 @@
|
|||
import fs from 'fs-extra';
|
||||
import logger from '../../config/logger/logger';
|
||||
|
||||
export enum EventTypes {
|
||||
// System events
|
||||
RESTART = 'restart',
|
||||
UPDATE = 'update',
|
||||
CLONE_REPO = 'clone_repo',
|
||||
UPDATE_REPO = 'update_repo',
|
||||
APP = 'app',
|
||||
SYSTEM_INFO = 'system_info',
|
||||
}
|
||||
|
||||
type SystemEvent = {
|
||||
id: string;
|
||||
type: EventTypes;
|
||||
args: string[];
|
||||
creationDate: Date;
|
||||
};
|
||||
|
||||
type EventStatusTypes = 'running' | 'success' | 'error' | 'waiting';
|
||||
|
||||
const WATCH_FILE = '/runtipi/state/events';
|
||||
|
||||
// File state example:
|
||||
// restart 1631231231231 running "arg1 arg2"
|
||||
class EventDispatcher {
|
||||
private static instance: EventDispatcher | null;
|
||||
|
||||
private queue: SystemEvent[] = [];
|
||||
|
||||
private lock: SystemEvent | null = null;
|
||||
|
||||
private interval: NodeJS.Timer;
|
||||
|
||||
private intervals: NodeJS.Timer[] = [];
|
||||
|
||||
constructor() {
|
||||
const timer = this.pollQueue();
|
||||
this.interval = timer;
|
||||
}
|
||||
|
||||
public static getInstance(): EventDispatcher {
|
||||
if (!EventDispatcher.instance) {
|
||||
EventDispatcher.instance = new EventDispatcher();
|
||||
}
|
||||
return EventDispatcher.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a random task id
|
||||
* @returns - Random id
|
||||
*/
|
||||
private generateId() {
|
||||
return Math.random().toString(36).substring(2, 9);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect lock status and clean queue if event is done
|
||||
*/
|
||||
private collectLockStatusAndClean() {
|
||||
if (!this.lock) {
|
||||
return;
|
||||
}
|
||||
|
||||
const status = this.getEventStatus(this.lock.id);
|
||||
|
||||
if (status === 'running' || status === 'waiting') {
|
||||
return;
|
||||
}
|
||||
|
||||
this.clearEvent(this.lock, status);
|
||||
this.lock = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll queue and run events
|
||||
*/
|
||||
private pollQueue() {
|
||||
logger.info('EventDispatcher: Polling queue...');
|
||||
|
||||
if (!this.interval) {
|
||||
const id = setInterval(() => {
|
||||
this.runEvent();
|
||||
this.collectLockStatusAndClean();
|
||||
}, 1000);
|
||||
this.intervals.push(id);
|
||||
return id;
|
||||
}
|
||||
|
||||
return this.interval;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run event from the queue if there is no lock
|
||||
*/
|
||||
private async runEvent() {
|
||||
if (this.lock) {
|
||||
return;
|
||||
}
|
||||
|
||||
const event = this.queue[0];
|
||||
if (!event) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.lock = event;
|
||||
|
||||
// Write event to state file
|
||||
const args = event.args.join(' ');
|
||||
const line = `${event.type} ${event.id} waiting ${args}`;
|
||||
fs.writeFileSync(WATCH_FILE, `${line}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check event status
|
||||
* @param id - Event id
|
||||
* @returns - Event status
|
||||
*/
|
||||
private getEventStatus(id: string): EventStatusTypes {
|
||||
const event = this.queue.find((e) => e.id === id);
|
||||
|
||||
if (!event) {
|
||||
return 'success';
|
||||
}
|
||||
|
||||
// if event was created more than 3 minutes ago, it's an error
|
||||
if (new Date().getTime() - event.creationDate.getTime() > 5 * 60 * 1000) {
|
||||
return 'error';
|
||||
}
|
||||
|
||||
const file = fs.readFileSync(WATCH_FILE, 'utf8');
|
||||
const lines = file?.split('\n') || [];
|
||||
const line = lines.find((l) => l.startsWith(`${event.type} ${event.id}`));
|
||||
|
||||
if (!line) {
|
||||
return 'waiting';
|
||||
}
|
||||
|
||||
const status = line.split(' ')[2] as EventStatusTypes;
|
||||
|
||||
return status;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch an event to the queue
|
||||
* @param type - Event type
|
||||
* @param args - Event arguments
|
||||
* @returns - Event object
|
||||
*/
|
||||
public dispatchEvent(type: EventTypes, args?: string[]): SystemEvent {
|
||||
const event: SystemEvent = {
|
||||
id: this.generateId(),
|
||||
type,
|
||||
args: args || [],
|
||||
creationDate: new Date(),
|
||||
};
|
||||
|
||||
this.queue.push(event);
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear event from queue
|
||||
* @param id - Event id
|
||||
*/
|
||||
private clearEvent(event: SystemEvent, status: EventStatusTypes = 'success') {
|
||||
this.queue = this.queue.filter((e) => e.id !== event.id);
|
||||
if (fs.existsSync(`/app/logs/${event.id}.log`)) {
|
||||
const log = fs.readFileSync(`/app/logs/${event.id}.log`, 'utf8');
|
||||
if (log && status === 'error') {
|
||||
logger.error(`EventDispatcher: ${event.type} ${event.id} failed with error: ${log}`);
|
||||
} else if (log) {
|
||||
logger.info(`EventDispatcher: ${event.type} ${event.id} finished with message: ${log}`);
|
||||
}
|
||||
fs.unlinkSync(`/app/logs/${event.id}.log`);
|
||||
}
|
||||
fs.writeFileSync(WATCH_FILE, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch an event to the queue and wait for it to finish
|
||||
* @param type - Event type
|
||||
* @param args - Event arguments
|
||||
* @returns - Promise that resolves when the event is done
|
||||
*/
|
||||
public async dispatchEventAsync(type: EventTypes, args?: string[]): Promise<{ success: boolean; stdout?: string }> {
|
||||
const event = this.dispatchEvent(type, args);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const interval = setInterval(() => {
|
||||
this.intervals.push(interval);
|
||||
const status = this.getEventStatus(event.id);
|
||||
|
||||
let log = '';
|
||||
if (fs.existsSync(`/app/logs/${event.id}.log`)) {
|
||||
log = fs.readFileSync(`/app/logs/${event.id}.log`, 'utf8');
|
||||
}
|
||||
|
||||
if (status === 'success') {
|
||||
clearInterval(interval);
|
||||
resolve({ success: true, stdout: log });
|
||||
} else if (status === 'error') {
|
||||
clearInterval(interval);
|
||||
resolve({ success: false, stdout: log });
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
}
|
||||
|
||||
public clearInterval() {
|
||||
clearInterval(this.interval);
|
||||
this.intervals.forEach((i) => clearInterval(i));
|
||||
}
|
||||
|
||||
public clear() {
|
||||
this.queue = [];
|
||||
this.lock = null;
|
||||
EventDispatcher.instance = null;
|
||||
fs.writeFileSync(WATCH_FILE, '');
|
||||
}
|
||||
}
|
||||
|
||||
export const eventDispatcher = EventDispatcher.getInstance();
|
||||
|
||||
export default EventDispatcher;
|
|
@ -1,128 +0,0 @@
|
|||
import { z } from 'zod';
|
||||
import * as dotenv from 'dotenv';
|
||||
import fs from 'fs-extra';
|
||||
import { readJsonFile } from '../../modules/fs/fs.helpers';
|
||||
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
dotenv.config({ path: '.env.dev' });
|
||||
} else {
|
||||
dotenv.config({ path: '.env' });
|
||||
}
|
||||
const {
|
||||
LOGS_FOLDER = '/app/logs',
|
||||
LOGS_APP = 'app.log',
|
||||
LOGS_ERROR = 'error.log',
|
||||
NODE_ENV = 'development',
|
||||
JWT_SECRET = '',
|
||||
INTERNAL_IP = '',
|
||||
TIPI_VERSION = '',
|
||||
NGINX_PORT = '80',
|
||||
APPS_REPO_ID = '',
|
||||
APPS_REPO_URL = '',
|
||||
DOMAIN = '',
|
||||
STORAGE_PATH = '/runtipi',
|
||||
REDIS_HOST = 'tipi-redis',
|
||||
} = process.env;
|
||||
|
||||
const configSchema = z.object({
|
||||
NODE_ENV: z.union([z.literal('development'), z.literal('production'), z.literal('test')]),
|
||||
REDIS_HOST: z.string(),
|
||||
status: z.union([z.literal('RUNNING'), z.literal('UPDATING'), z.literal('RESTARTING')]),
|
||||
logs: z.object({
|
||||
LOGS_FOLDER: z.string(),
|
||||
LOGS_APP: z.string(),
|
||||
LOGS_ERROR: z.string(),
|
||||
}),
|
||||
dnsIp: z.string(),
|
||||
rootFolder: z.string(),
|
||||
internalIp: z.string(),
|
||||
version: z.string(),
|
||||
jwtSecret: z.string(),
|
||||
clientUrls: z.array(z.string()),
|
||||
appsRepoId: z.string(),
|
||||
appsRepoUrl: z.string(),
|
||||
domain: z.string(),
|
||||
storagePath: z.string(),
|
||||
});
|
||||
|
||||
/**
 * Process-wide configuration singleton.
 *
 * Built once from module-scope environment constants (declared above, not
 * visible in this chunk), validated with `configSchema`, and optionally
 * overlaid with values from /runtipi/state/settings.json.
 */
class Config {
  // Lazily-created singleton instance (see getInstance()).
  private static instance: Config;

  // The current, fully validated configuration.
  private config: z.infer<typeof configSchema>;

  constructor() {
    // Assemble the initial config from environment-derived constants plus
    // fixed defaults (rootFolder, dnsIp, status).
    const envConfig: z.infer<typeof configSchema> = {
      logs: {
        LOGS_FOLDER,
        LOGS_APP,
        LOGS_ERROR,
      },
      REDIS_HOST,
      NODE_ENV: NODE_ENV as z.infer<typeof configSchema>['NODE_ENV'],
      rootFolder: '/runtipi',
      internalIp: INTERNAL_IP,
      version: TIPI_VERSION,
      jwtSecret: JWT_SECRET,
      // DOMAIN may be unset; the short-circuit `DOMAIN && ...` yields a falsy
      // entry in that case, which filter(Boolean) removes.
      clientUrls: ['http://localhost:3000', `http://${INTERNAL_IP}`, `http://${INTERNAL_IP}:${NGINX_PORT}`, `http://${INTERNAL_IP}:3000`, DOMAIN && `https://${DOMAIN}`].filter(Boolean),
      appsRepoId: APPS_REPO_ID,
      appsRepoUrl: APPS_REPO_URL,
      domain: DOMAIN,
      dnsIp: '9.9.9.9',
      status: 'RUNNING',
      storagePath: STORAGE_PATH,
    };

    // Validate eagerly so a malformed environment fails at startup rather
    // than at first read.
    const parsed = configSchema.parse({
      ...envConfig,
    });

    this.config = parsed;
  }

  /** Returns the singleton, creating it on first use. */
  public static getInstance(): Config {
    if (!Config.instance) {
      Config.instance = new Config();
    }
    return Config.instance;
  }

  /** Returns the current validated configuration object. */
  public getConfig() {
    return this.config;
  }

  /**
   * Overlays values from /runtipi/state/settings.json onto the current
   * config and re-validates. Throws (zod) if the merged result is invalid;
   * a missing/unreadable file is treated as an empty overlay.
   */
  public applyJsonConfig() {
    const fileConfig = readJsonFile('/runtipi/state/settings.json') || {};

    const parsed = configSchema.parse({
      ...this.config,
      ...(fileConfig as object),
    });

    this.config = parsed;
  }

  /**
   * Sets a single key, re-validating the whole object so an invalid value
   * throws and leaves the stored config untouched. When `writeFile` is true
   * the key is also merged into settings.json so it survives restarts.
   */
  public setConfig<T extends keyof typeof configSchema.shape>(key: T, value: z.infer<typeof configSchema>[T], writeFile = false) {
    const newConf: z.infer<typeof configSchema> = { ...this.getConfig() };
    newConf[key] = value;

    this.config = configSchema.parse(newConf);

    if (writeFile) {
      // settings.json only holds a subset of keys, hence the partial() parse.
      const currentJsonConf = readJsonFile('/runtipi/state/settings.json') || {};
      const parsedConf = configSchema.partial().parse(currentJsonConf);

      parsedConf[key] = value;
      const parsed = configSchema.partial().parse(parsedConf);

      fs.writeFileSync('/runtipi/state/settings.json', JSON.stringify(parsed));
    }
  }
}
|
||||
|
||||
export const setConfig = <T extends keyof typeof configSchema.shape>(key: T, value: z.infer<typeof configSchema>[T], writeFile = false) => {
|
||||
Config.getInstance().setConfig(key, value, writeFile);
|
||||
};
|
||||
|
||||
export const getConfig = () => Config.getInstance().getConfig();
|
||||
|
||||
export const applyJsonConfig = () => Config.getInstance().applyJsonConfig();
|
|
@ -1,200 +0,0 @@
|
|||
import fs from 'fs-extra';
|
||||
import { eventDispatcher, EventTypes } from '../EventDispatcher';
|
||||
|
||||
const WATCH_FILE = '/runtipi/state/events';
|
||||
|
||||
jest.mock('fs-extra');
|
||||
|
||||
// eslint-disable-next-line no-promise-executor-return
|
||||
const wait = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
|
||||
|
||||
beforeEach(() => {
|
||||
eventDispatcher.clear();
|
||||
fs.writeFileSync(WATCH_FILE, '');
|
||||
fs.writeFileSync('/app/logs/123.log', 'test');
|
||||
});
|
||||
|
||||
describe('EventDispatcher - dispatchEvent', () => {
|
||||
it('should dispatch an event', () => {
|
||||
const event = eventDispatcher.dispatchEvent(EventTypes.APP);
|
||||
expect(event.id).toBeDefined();
|
||||
});
|
||||
|
||||
it('should dispatch an event with args', () => {
|
||||
const event = eventDispatcher.dispatchEvent(EventTypes.APP, ['--help']);
|
||||
expect(event.id).toBeDefined();
|
||||
});
|
||||
|
||||
it('Should put events into queue', async () => {
|
||||
eventDispatcher.dispatchEvent(EventTypes.APP, ['--help']);
|
||||
eventDispatcher.dispatchEvent(EventTypes.APP, ['--help']);
|
||||
|
||||
// @ts-ignore
|
||||
const { queue } = eventDispatcher;
|
||||
|
||||
expect(queue.length).toBe(2);
|
||||
});
|
||||
|
||||
it('Should put first event into lock after 1 sec', async () => {
|
||||
eventDispatcher.dispatchEvent(EventTypes.APP, ['--help']);
|
||||
eventDispatcher.dispatchEvent(EventTypes.UPDATE, ['--help']);
|
||||
|
||||
// @ts-ignore
|
||||
const { queue } = eventDispatcher;
|
||||
|
||||
await wait(1050);
|
||||
|
||||
// @ts-ignore
|
||||
const { lock } = eventDispatcher;
|
||||
|
||||
expect(queue.length).toBe(2);
|
||||
expect(lock).toBeDefined();
|
||||
expect(lock?.type).toBe(EventTypes.APP);
|
||||
});
|
||||
|
||||
it('Should clear event once its status is success', async () => {
|
||||
// @ts-ignore
|
||||
jest.spyOn(eventDispatcher, 'getEventStatus').mockReturnValueOnce('success');
|
||||
eventDispatcher.dispatchEvent(EventTypes.APP, ['--help']);
|
||||
|
||||
await wait(1050);
|
||||
|
||||
// @ts-ignore
|
||||
const { queue } = eventDispatcher;
|
||||
|
||||
expect(queue.length).toBe(0);
|
||||
});
|
||||
|
||||
it('Should clear event once its status is error', async () => {
|
||||
// @ts-ignore
|
||||
jest.spyOn(eventDispatcher, 'getEventStatus').mockReturnValueOnce('error');
|
||||
eventDispatcher.dispatchEvent(EventTypes.APP, ['--help']);
|
||||
|
||||
await wait(1050);
|
||||
|
||||
// @ts-ignore
|
||||
const { queue } = eventDispatcher;
|
||||
|
||||
expect(queue.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('EventDispatcher - dispatchEventAsync', () => {
|
||||
it('Should dispatch an event and wait for it to finish', async () => {
|
||||
// @ts-ignore
|
||||
jest.spyOn(eventDispatcher, 'getEventStatus').mockReturnValueOnce('success');
|
||||
const { success } = await eventDispatcher.dispatchEventAsync(EventTypes.APP, ['--help']);
|
||||
|
||||
expect(success).toBe(true);
|
||||
});
|
||||
|
||||
it('Should dispatch an event and wait for it to finish with error', async () => {
|
||||
// @ts-ignore
|
||||
jest.spyOn(eventDispatcher, 'getEventStatus').mockReturnValueOnce('error');
|
||||
|
||||
const { success } = await eventDispatcher.dispatchEventAsync(EventTypes.APP, ['--help']);
|
||||
|
||||
expect(success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('EventDispatcher - runEvent', () => {
|
||||
it('Should do nothing if there is a lock', async () => {
|
||||
// @ts-ignore
|
||||
eventDispatcher.lock = { id: '123', type: EventTypes.APP, args: [] };
|
||||
|
||||
// @ts-ignore
|
||||
await eventDispatcher.runEvent();
|
||||
|
||||
// @ts-ignore
|
||||
const file = fs.readFileSync(WATCH_FILE, 'utf8');
|
||||
|
||||
expect(file).toBe('');
|
||||
});
|
||||
|
||||
it('Should do nothing if there is no event in queue', async () => {
|
||||
// @ts-ignore
|
||||
await eventDispatcher.runEvent();
|
||||
|
||||
// @ts-ignore
|
||||
const file = fs.readFileSync(WATCH_FILE, 'utf8');
|
||||
|
||||
expect(file).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('EventDispatcher - getEventStatus', () => {
|
||||
it('Should return success if event is not in the queue', async () => {
|
||||
// @ts-ignore
|
||||
eventDispatcher.queue = [];
|
||||
// @ts-ignore
|
||||
const status = eventDispatcher.getEventStatus('123');
|
||||
|
||||
expect(status).toBe('success');
|
||||
});
|
||||
|
||||
it('Should return error if event is expired', async () => {
|
||||
const dateFiveMinutesAgo = new Date(new Date().getTime() - 5 * 60 * 10000);
|
||||
// @ts-ignore
|
||||
eventDispatcher.queue = [{ id: '123', type: EventTypes.APP, args: [], creationDate: dateFiveMinutesAgo }];
|
||||
// @ts-ignore
|
||||
const status = eventDispatcher.getEventStatus('123');
|
||||
|
||||
expect(status).toBe('error');
|
||||
});
|
||||
|
||||
it('Should be waiting if line is not found in the file', async () => {
|
||||
// @ts-ignore
|
||||
eventDispatcher.queue = [{ id: '123', type: EventTypes.APP, args: [], creationDate: new Date() }];
|
||||
// @ts-ignore
|
||||
const status = eventDispatcher.getEventStatus('123');
|
||||
|
||||
expect(status).toBe('waiting');
|
||||
});
|
||||
});
|
||||
|
||||
describe('EventDispatcher - clearEvent', () => {
|
||||
it('Should clear event', async () => {
|
||||
const event = { id: '123', type: EventTypes.APP, args: [], creationDate: new Date() };
|
||||
// @ts-ignore
|
||||
eventDispatcher.queue = [event];
|
||||
// @ts-ignore
|
||||
eventDispatcher.clearEvent(event);
|
||||
|
||||
// @ts-ignore
|
||||
const { queue } = eventDispatcher;
|
||||
|
||||
expect(queue.length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('EventDispatcher - pollQueue', () => {
|
||||
it('Should not create a new interval if one already exists', async () => {
|
||||
// @ts-ignore
|
||||
eventDispatcher.interval = 123;
|
||||
// @ts-ignore
|
||||
const id = eventDispatcher.pollQueue();
|
||||
// @ts-ignore
|
||||
const { interval } = eventDispatcher;
|
||||
|
||||
expect(interval).toBe(123);
|
||||
expect(id).toBe(123);
|
||||
|
||||
clearInterval(interval);
|
||||
clearInterval(id);
|
||||
});
|
||||
});
|
||||
|
||||
describe('EventDispatcher - collectLockStatusAndClean', () => {
|
||||
it('Should do nothing if there is no lock', async () => {
|
||||
// @ts-ignore
|
||||
eventDispatcher.lock = null;
|
||||
// @ts-ignore
|
||||
eventDispatcher.collectLockStatusAndClean();
|
||||
|
||||
// @ts-ignore
|
||||
const { lock } = eventDispatcher;
|
||||
|
||||
expect(lock).toBeNull();
|
||||
});
|
||||
});
|
|
@ -1,99 +0,0 @@
|
|||
import { faker } from '@faker-js/faker';
|
||||
import fs from 'fs-extra';
|
||||
import { readJsonFile } from '../../../modules/fs/fs.helpers';
|
||||
import { applyJsonConfig, getConfig, setConfig } from '../TipiConfig';
|
||||
|
||||
jest.mock('fs-extra');
|
||||
|
||||
beforeEach(async () => {
|
||||
jest.resetModules();
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
describe('Test: getConfig', () => {
|
||||
it('It should return config from .env', () => {
|
||||
const config = getConfig();
|
||||
|
||||
expect(config).toBeDefined();
|
||||
expect(config.NODE_ENV).toBe('test');
|
||||
expect(config.logs.LOGS_FOLDER).toBe('/app/logs');
|
||||
expect(config.logs.LOGS_APP).toBe('app.log');
|
||||
expect(config.logs.LOGS_ERROR).toBe('error.log');
|
||||
expect(config.dnsIp).toBe('9.9.9.9');
|
||||
expect(config.rootFolder).toBe('/runtipi');
|
||||
expect(config.internalIp).toBe('localhost');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: setConfig', () => {
|
||||
it('It should be able set config', () => {
|
||||
const randomWord = faker.random.word();
|
||||
setConfig('appsRepoUrl', randomWord);
|
||||
const config = getConfig();
|
||||
|
||||
expect(config).toBeDefined();
|
||||
expect(config.appsRepoUrl).toBe(randomWord);
|
||||
});
|
||||
|
||||
it('Should not be able to set invalid NODE_ENV', () => {
|
||||
// @ts-ignore
|
||||
expect(() => setConfig('NODE_ENV', 'invalid')).toThrow();
|
||||
});
|
||||
|
||||
it('Should write config to json file', () => {
|
||||
const randomWord = faker.random.word();
|
||||
setConfig('appsRepoUrl', randomWord, true);
|
||||
const config = getConfig();
|
||||
|
||||
expect(config).toBeDefined();
|
||||
expect(config.appsRepoUrl).toBe(randomWord);
|
||||
|
||||
const settingsJson = readJsonFile('/runtipi/state/settings.json') as { [key: string]: string };
|
||||
|
||||
expect(settingsJson).toBeDefined();
|
||||
expect(settingsJson.appsRepoUrl).toBe(randomWord);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: applyJsonConfig', () => {
|
||||
it('It should be able to apply json config', () => {
|
||||
const settingsJson = {
|
||||
appsRepoUrl: faker.random.word(),
|
||||
appsRepoId: faker.random.word(),
|
||||
domain: faker.random.word(),
|
||||
};
|
||||
|
||||
const MockFiles = {
|
||||
'/runtipi/state/settings.json': JSON.stringify(settingsJson),
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(MockFiles);
|
||||
|
||||
applyJsonConfig();
|
||||
const config = getConfig();
|
||||
|
||||
expect(config).toBeDefined();
|
||||
|
||||
expect(config.appsRepoUrl).toBe(settingsJson.appsRepoUrl);
|
||||
expect(config.appsRepoId).toBe(settingsJson.appsRepoId);
|
||||
expect(config.domain).toBe(settingsJson.domain);
|
||||
});
|
||||
|
||||
it('Should not be able to apply an invalid value from json config', () => {
|
||||
const settingsJson = {
|
||||
appsRepoUrl: faker.random.word(),
|
||||
appsRepoId: faker.random.word(),
|
||||
domain: 10,
|
||||
};
|
||||
|
||||
const MockFiles = {
|
||||
'/runtipi/state/settings.json': JSON.stringify(settingsJson),
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(MockFiles);
|
||||
|
||||
expect(() => applyJsonConfig()).toThrow();
|
||||
});
|
||||
});
|
|
@ -1,37 +0,0 @@
|
|||
import cron from 'node-cron';
import { getConfig } from '../../config/TipiConfig';
import startJobs from '../jobs';
import { eventDispatcher, EventTypes } from '../../config/EventDispatcher';

// node-cron is mocked so schedules can be observed without real timers.
// NOTE(review): the second test expects dispatchEvent to fire during
// startJobs(), which implies a manual mock that invokes the scheduled
// callback immediately — confirm against the __mocks__ folder.
jest.mock('node-cron');
jest.mock('child_process');

beforeEach(async () => {
  jest.resetModules();
  jest.resetAllMocks();
});

describe('Test: startJobs', () => {
  it('Should start cron jobs', () => {
    const spy = jest.spyOn(cron, 'schedule');

    startJobs();
    expect(spy).toHaveBeenCalled();
    // The apps-repo refresh job is registered on a 30-minute schedule.
    expect(spy).toHaveBeenCalledWith('*/30 * * * *', expect.any(Function));
    spy.mockRestore();
  });

  it('Should update apps repo on cron trigger', () => {
    const spy = jest.spyOn(eventDispatcher, 'dispatchEvent');

    // Act
    startJobs();

    // Assert — exactly two events: repo update first, then system info.
    expect(spy.mock.calls.length).toBe(2);
    expect(spy.mock.calls[0]).toEqual([EventTypes.UPDATE_REPO, [getConfig().appsRepoUrl]]);
    expect(spy.mock.calls[1]).toEqual([EventTypes.SYSTEM_INFO, []]);

    spy.mockRestore();
  });
});
|
|
@ -1,20 +0,0 @@
|
|||
import cron from 'node-cron';
|
||||
import logger from '../../config/logger/logger';
|
||||
import { getConfig } from '../config/TipiConfig';
|
||||
import { eventDispatcher, EventTypes } from '../config/EventDispatcher';
|
||||
|
||||
const startJobs = () => {
|
||||
logger.info('Starting cron jobs...');
|
||||
|
||||
// Every 30 minutes
|
||||
cron.schedule('*/30 * * * *', async () => {
|
||||
eventDispatcher.dispatchEvent(EventTypes.UPDATE_REPO, [getConfig().appsRepoUrl]);
|
||||
});
|
||||
|
||||
// every minute
|
||||
cron.schedule('* * * * *', () => {
|
||||
eventDispatcher.dispatchEvent(EventTypes.SYSTEM_INFO, []);
|
||||
});
|
||||
};
|
||||
|
||||
export default startJobs;
|
|
@ -1,76 +0,0 @@
|
|||
import { faker } from '@faker-js/faker';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import { Request, Response } from 'express';
|
||||
import TipiCache from '../../../config/TipiCache';
|
||||
import { getConfig } from '../../config/TipiConfig';
|
||||
import getSessionMiddleware from '../sessionMiddleware';
|
||||
|
||||
describe('SessionMiddleware', () => {
|
||||
it('Should append session to request object if a valid token is present', async () => {
|
||||
// Arrange
|
||||
const session = faker.random.alphaNumeric(32);
|
||||
const userId = faker.datatype.number();
|
||||
await TipiCache.set(session, userId.toString());
|
||||
const token = jwt.sign({ id: userId, session }, getConfig().jwtSecret);
|
||||
const req = {
|
||||
headers: {
|
||||
authorization: `Bearer ${token}`,
|
||||
},
|
||||
} as Request;
|
||||
const next = jest.fn();
|
||||
const res = {} as Response;
|
||||
|
||||
// Act
|
||||
await getSessionMiddleware(req, res, next);
|
||||
|
||||
// Assert
|
||||
expect(req).toHaveProperty('session');
|
||||
expect(req.session).toHaveProperty('id');
|
||||
expect(req.session).toHaveProperty('userId');
|
||||
expect(req.session.id).toBe(session);
|
||||
expect(req.session.userId).toBe(userId);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('Should not append session to request object if a invalid token is present', async () => {
|
||||
// Arrange
|
||||
const session = faker.random.alphaNumeric(32);
|
||||
const userId = faker.datatype.number();
|
||||
await TipiCache.set(session, userId.toString());
|
||||
const token = jwt.sign({ id: userId, session }, 'invalidSecret');
|
||||
const req = {
|
||||
headers: {
|
||||
authorization: `Bearer ${token}`,
|
||||
},
|
||||
} as Request;
|
||||
const next = jest.fn();
|
||||
const res = {} as Response;
|
||||
|
||||
// Act
|
||||
await getSessionMiddleware(req, res, next);
|
||||
|
||||
// Assert
|
||||
expect(req).toHaveProperty('session');
|
||||
expect(req.session).not.toHaveProperty('id');
|
||||
expect(req.session).not.toHaveProperty('userId');
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('Should not append session to request object if a token is not present', async () => {
|
||||
// Arrange
|
||||
const req = {
|
||||
headers: {},
|
||||
} as Request;
|
||||
const next = jest.fn();
|
||||
const res = {} as Response;
|
||||
|
||||
// Act
|
||||
await getSessionMiddleware(req, res, next);
|
||||
|
||||
// Assert
|
||||
expect(req).toHaveProperty('session');
|
||||
expect(req.session).not.toHaveProperty('id');
|
||||
expect(req.session).not.toHaveProperty('userId');
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
});
|
|
@ -1,21 +0,0 @@
|
|||
import { AuthChecker } from 'type-graphql';
|
||||
import User from '../../modules/auth/user.entity';
|
||||
import { MyContext } from '../../types';
|
||||
|
||||
export const customAuthChecker: AuthChecker<MyContext> = async ({ context }) => {
|
||||
// here we can read the user from context
|
||||
// and check his permission in the db against the `roles` argument
|
||||
// that comes from the `@Authorized` decorator, eg. ["ADMIN", "MODERATOR"]
|
||||
if (!context.req?.session?.userId) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const { userId } = context.req.session;
|
||||
const user = await User.findOne({ where: { id: userId } });
|
||||
|
||||
if (!user) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
|
@ -1,32 +0,0 @@
|
|||
import { NextFunction, Request, Response } from 'express';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import logger from '../../config/logger/logger';
|
||||
import TipiCache from '../../config/TipiCache';
|
||||
import { getConfig } from '../config/TipiConfig';
|
||||
|
||||
const getSessionMiddleware = async (req: Request, _: Response, next: NextFunction) => {
|
||||
req.session = {};
|
||||
|
||||
const token = req.headers.authorization?.split(' ')[1];
|
||||
|
||||
if (token) {
|
||||
try {
|
||||
const decodedToken = jwt.verify(token, getConfig().jwtSecret) as { id: number; session: string };
|
||||
|
||||
const userId = await TipiCache.get(decodedToken.session);
|
||||
|
||||
if (userId === decodedToken.id.toString()) {
|
||||
req.session = {
|
||||
userId: decodedToken.id,
|
||||
id: decodedToken.session,
|
||||
};
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
}
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
export default getSessionMiddleware;
|
9
packages/system-api/src/declarations.d.ts
vendored
9
packages/system-api/src/declarations.d.ts
vendored
|
@ -1,9 +0,0 @@
|
|||
// Module augmentation for Express: every Request carries a `session`
// object, populated by the session middleware (empty when unauthenticated).
declare namespace Express {
  interface Request {
    session: {
      // Id of the authenticated user, set when a valid token was presented.
      userId?: number;
      // Opaque session identifier (the cache key), set alongside userId.
      id?: string;
    };
    [key: string]: unknown;
  }
}
|
|
@ -1,5 +0,0 @@
|
|||
const objectKeys = <T extends object>(obj: T): (keyof T)[] => Object.keys(obj) as (keyof T)[];
|
||||
|
||||
export const notEmpty = <TValue>(value: TValue | null | undefined): value is TValue => value !== null && value !== undefined;
|
||||
|
||||
export default { objectKeys };
|
|
@ -1,142 +0,0 @@
|
|||
import fs from 'fs-extra';
|
||||
import { readJsonFile, readFile, readdirSync, fileExists, writeFile, createFolder, deleteFolder, getSeed } from '../fs.helpers';
|
||||
|
||||
jest.mock('fs-extra');
|
||||
|
||||
beforeEach(() => {
|
||||
// @ts-ignore
|
||||
fs.__resetAllMocks();
|
||||
});
|
||||
|
||||
describe('Test: readJsonFile', () => {
|
||||
it('should return the json file', () => {
|
||||
// Arrange
|
||||
const rawFile = '{"test": "test"}';
|
||||
const mockFiles = {
|
||||
'/runtipi/test-file.json': rawFile,
|
||||
};
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
||||
// Act
|
||||
const file = readJsonFile('/runtipi/test-file.json');
|
||||
|
||||
// Assert
|
||||
expect(file).toEqual({ test: 'test' });
|
||||
});
|
||||
|
||||
it('should return null if the file does not exist', () => {
|
||||
expect(readJsonFile('/test')).toBeNull();
|
||||
});
|
||||
|
||||
it('Should return null if fs.readFile throws an error', () => {
|
||||
// Arrange
|
||||
// @ts-ignore
|
||||
const spy = jest.spyOn(fs, 'readFileSync');
|
||||
spy.mockImplementation(() => {
|
||||
throw new Error('Error');
|
||||
});
|
||||
|
||||
// Act
|
||||
const file = readJsonFile('/test');
|
||||
|
||||
// Assert
|
||||
expect(file).toBeNull();
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: readFile', () => {
|
||||
it('should return the file', () => {
|
||||
const rawFile = 'test';
|
||||
const mockFiles = {
|
||||
'/runtipi/test-file.txt': rawFile,
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
||||
expect(readFile('/runtipi/test-file.txt')).toEqual('test');
|
||||
});
|
||||
|
||||
it('should return empty string if the file does not exist', () => {
|
||||
expect(readFile('/test')).toEqual('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: readdirSync', () => {
|
||||
it('should return the files', () => {
|
||||
const mockFiles = {
|
||||
'/runtipi/test/test-file.txt': 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
||||
expect(readdirSync('/runtipi/test')).toEqual(['test-file.txt']);
|
||||
});
|
||||
|
||||
it('should return empty array if the directory does not exist', () => {
|
||||
expect(readdirSync('/test')).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: fileExists', () => {
|
||||
it('should return true if the file exists', () => {
|
||||
const mockFiles = {
|
||||
'/runtipi/test-file.txt': 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
||||
expect(fileExists('/runtipi/test-file.txt')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should return false if the file does not exist', () => {
|
||||
expect(fileExists('/test-file.txt')).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: writeFile', () => {
|
||||
it('should write the file', () => {
|
||||
const spy = jest.spyOn(fs, 'writeFileSync');
|
||||
|
||||
writeFile('/runtipi/test-file.txt', 'test');
|
||||
|
||||
expect(spy).toHaveBeenCalledWith('/runtipi/test-file.txt', 'test');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: createFolder', () => {
|
||||
it('should create the folder', () => {
|
||||
const spy = jest.spyOn(fs, 'mkdirSync');
|
||||
|
||||
createFolder('/test');
|
||||
|
||||
expect(spy).toHaveBeenCalledWith('/test', { recursive: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: deleteFolder', () => {
|
||||
it('should delete the folder', () => {
|
||||
const spy = jest.spyOn(fs, 'rmSync');
|
||||
|
||||
deleteFolder('/test');
|
||||
|
||||
expect(spy).toHaveBeenCalledWith('/test', { recursive: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: getSeed', () => {
|
||||
it('should return the seed', () => {
|
||||
const mockFiles = {
|
||||
'/runtipi/state/seed': 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
||||
expect(getSeed()).toEqual('test');
|
||||
});
|
||||
});
|
|
@ -1,37 +0,0 @@
|
|||
import fs from 'fs-extra';
|
||||
|
||||
export const readJsonFile = (path: string): unknown | null => {
|
||||
try {
|
||||
const rawFile = fs.readFileSync(path).toString();
|
||||
|
||||
return JSON.parse(rawFile);
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
export const readFile = (path: string): string => {
|
||||
try {
|
||||
return fs.readFileSync(path).toString();
|
||||
} catch {
|
||||
return '';
|
||||
}
|
||||
};
|
||||
|
||||
export const readdirSync = (path: string): string[] => fs.readdirSync(path);
|
||||
|
||||
export const fileExists = (path: string): boolean => fs.existsSync(path);
|
||||
|
||||
export const writeFile = (path: string, data: string) => fs.writeFileSync(path, data);
|
||||
|
||||
export const createFolder = (path: string) => {
|
||||
if (!fileExists(path)) {
|
||||
fs.mkdirSync(path, { recursive: true });
|
||||
}
|
||||
};
|
||||
export const deleteFolder = (path: string) => fs.rmSync(path, { recursive: true });
|
||||
|
||||
export const getSeed = () => {
|
||||
const seed = readFile('/runtipi/state/seed');
|
||||
return seed.toString();
|
||||
};
|
|
@ -1,43 +0,0 @@
|
|||
import 'reflect-metadata';
import express from 'express';
import { createServer } from 'http';
import cors, { CorsOptions } from 'cors';
import logger from './config/logger/logger';
import getSessionMiddleware from './core/middlewares/sessionMiddleware';
import { __prod__ } from './config/constants/constants';
import startJobs from './core/jobs/jobs';
import { getConfig } from './core/config/TipiConfig';
import { eventDispatcher } from './core/config/EventDispatcher';

// CORS: accept every origin, with credentials disabled.
const corsOptions: CorsOptions = {
  credentials: false,
  origin: (_, callback) => {
    callback(null, true);
  },
};

// Boots the HTTP server: clears stale dispatcher state, mounts CORS and
// session middleware, and starts the cron jobs once listening.
const main = async () => {
  try {
    eventDispatcher.clear();

    const app = express();
    const port = 3001;

    app.use(cors(corsOptions));
    app.use(getSessionMiddleware);

    const httpServer = createServer(app);

    httpServer.listen(port, async () => {
      startJobs();

      // Start apps
      logger.info(`Server running on port ${port} 🚀 Production => ${__prod__}`);
      logger.info(`Config: ${JSON.stringify(getConfig(), null, 2)}`);
    });
  } catch (error) {
    // Startup failures are logged; the process is left to exit naturally.
    logger.error(error);
  }
};

main();
|
|
@ -1,4 +0,0 @@
|
|||
// Test bootstrap: load .env.test before any module under test reads
// environment variables.
import 'reflect-metadata';
import * as dotenv from 'dotenv';

dotenv.config({ path: '.env.test' });
|
|
@ -1,11 +0,0 @@
|
|||
import { eventDispatcher } from '../core/config/EventDispatcher';

// Silence the application logger for the whole test run.
jest.mock('../config/logger/logger', () => ({
  error: jest.fn(),
  info: jest.fn(),
  warn: jest.fn(),
}));

// Stop the dispatcher's polling interval so Jest can exit cleanly.
afterAll(() => {
  eventDispatcher.clearInterval();
});
|
|
@ -1,6 +0,0 @@
|
|||
import { Request, Response } from 'express';

// Per-request GraphQL context handed to resolvers; wraps the raw Express
// request/response pair (the session lives on `req.session`).
export type MyContext = {
  req: Request;
  res: Response;
};
|
|
@ -1,24 +0,0 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "es2018",
|
||||
"module": "CommonJS",
|
||||
"lib": ["es2021", "ESNext.AsyncIterable"],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"strict": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"noEmit": true,
|
||||
"esModuleInterop": true,
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": false,
|
||||
"jsx": "preserve",
|
||||
"incremental": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"experimentalDecorators": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"outDir": "./dist"
|
||||
},
|
||||
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.js", "jest.config.js"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
Loading…
Reference in a new issue