refactor(worker): move executors from cli to worker
This commit is contained in:
parent
5f18ff16de
commit
417785ee24
47 changed files with 991 additions and 790 deletions
|
@ -54,6 +54,38 @@ services:
|
|||
networks:
|
||||
- tipi_main_network
|
||||
|
||||
tipi-worker:
|
||||
container_name: tipi-worker
|
||||
image: ghcr.io/runtipi/runtipi-worker:${TIPI_VERSION}
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:3000/healthcheck']
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
retries: 120
|
||||
start_period: 5s
|
||||
depends_on:
|
||||
tipi-db:
|
||||
condition: service_healthy
|
||||
tipi-redis:
|
||||
condition: service_healthy
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
volumes:
|
||||
- /:/host/root:ro
|
||||
- /proc:/host/proc
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- ${PWD}/.env:/app/.env
|
||||
- ${PWD}/state:/app/state
|
||||
- ${PWD}/repos:/app/repos
|
||||
- ${PWD}/apps:/app/apps
|
||||
- ${STORAGE_PATH:-$PWD}/app-data:/storage/app-data
|
||||
- ${PWD}/logs:/app/logs
|
||||
- ${PWD}/traefik:/app/traefik
|
||||
networks:
|
||||
- tipi_main_network
|
||||
|
||||
tipi-dashboard:
|
||||
image: ghcr.io/runtipi/runtipi:${TIPI_VERSION}
|
||||
restart: on-failure
|
||||
|
|
|
@ -14,7 +14,8 @@
|
|||
"build:meta": "esbuild ./src/index.ts --bundle --platform=node --target=node18 --outfile=dist/index.js --metafile=meta.json --analyze",
|
||||
"dev": "dotenv -e ../../.env nodemon",
|
||||
"lint": "eslint . --ext .ts",
|
||||
"tsc": "tsc --noEmit"
|
||||
"tsc": "tsc --noEmit",
|
||||
"knip": "knip"
|
||||
},
|
||||
"pkg": {
|
||||
"assets": "assets/**/*",
|
||||
|
@ -31,10 +32,10 @@
|
|||
"@faker-js/faker": "^8.2.0",
|
||||
"@types/cli-progress": "^3.11.4",
|
||||
"@types/node": "20.8.10",
|
||||
"@types/web-push": "^3.6.2",
|
||||
"dotenv-cli": "^7.3.0",
|
||||
"esbuild": "^0.19.4",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"knip": "^2.39.0",
|
||||
"memfs": "^4.6.0",
|
||||
"nodemon": "^3.0.1",
|
||||
"pkg": "^5.8.1",
|
||||
|
@ -43,7 +44,6 @@
|
|||
"vitest": "^0.34.6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@runtipi/postgres-migrations": "^5.3.0",
|
||||
"@runtipi/shared": "workspace:^",
|
||||
"axios": "^1.6.0",
|
||||
"boxen": "^7.1.1",
|
||||
|
@ -53,12 +53,8 @@
|
|||
"cli-spinners": "^2.9.1",
|
||||
"commander": "^11.1.0",
|
||||
"dotenv": "^16.3.1",
|
||||
"ioredis": "^5.3.2",
|
||||
"log-update": "^5.0.1",
|
||||
"pg": "^8.11.3",
|
||||
"semver": "^7.5.4",
|
||||
"systeminformation": "^5.21.15",
|
||||
"web-push": "^3.6.6",
|
||||
"zod": "^3.22.4"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,300 +1,64 @@
|
|||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import pg from 'pg';
|
||||
import { Queue, QueueEvents } from 'bullmq';
|
||||
import { SystemEvent, eventSchema } from '@runtipi/shared';
|
||||
import { getEnv } from '@/utils/environment/environment';
|
||||
import { pathExists } from '@/utils/fs-helpers';
|
||||
import { compose } from '@/utils/docker-helpers';
|
||||
import { copyDataDir, generateEnvFile } from './app.helpers';
|
||||
import { fileLogger } from '@/utils/logger/file-logger';
|
||||
import { logger } from '@/utils/logger/logger';
|
||||
import { TerminalSpinner } from '@/utils/logger/terminal-spinner';
|
||||
import { execAsync } from '@/utils/exec-async/execAsync';
|
||||
|
||||
const getDbClient = async () => {
|
||||
const { postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getEnv();
|
||||
|
||||
const client = new pg.Client({
|
||||
host: '127.0.0.1',
|
||||
database: postgresDatabase,
|
||||
user: postgresUsername,
|
||||
password: postgresPassword,
|
||||
port: Number(postgresPort),
|
||||
});
|
||||
|
||||
await client.connect();
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
export class AppExecutors {
|
||||
private readonly logger;
|
||||
|
||||
private queue: Queue;
|
||||
|
||||
private queueEvents: QueueEvents;
|
||||
|
||||
constructor() {
|
||||
this.logger = fileLogger;
|
||||
const { redisPassword } = getEnv();
|
||||
this.logger = logger;
|
||||
this.queue = new Queue('events', { connection: { host: '127.0.0.1', port: 6379, password: redisPassword } });
|
||||
this.queueEvents = new QueueEvents('events', { connection: { host: '127.0.0.1', port: 6379, password: redisPassword } });
|
||||
}
|
||||
|
||||
private handleAppError = (err: unknown) => {
|
||||
if (err instanceof Error) {
|
||||
this.logger.error(`An error occurred: ${err.message}`);
|
||||
return { success: false, message: err.message };
|
||||
}
|
||||
|
||||
return { success: false, message: `An error occurred: ${err}` };
|
||||
};
|
||||
|
||||
private getAppPaths = (appId: string) => {
|
||||
const { rootFolderHost, storagePath, appsRepoId } = getEnv();
|
||||
|
||||
const appDataDirPath = path.join(storagePath, 'app-data', appId);
|
||||
const appDirPath = path.join(rootFolderHost, 'apps', appId);
|
||||
const configJsonPath = path.join(appDirPath, 'config.json');
|
||||
const repoPath = path.join(rootFolderHost, 'repos', appsRepoId, 'apps', appId);
|
||||
|
||||
return { appDataDirPath, appDirPath, configJsonPath, repoPath };
|
||||
};
|
||||
|
||||
/**
|
||||
* Given an app id, ensures that the app folder exists in the apps folder
|
||||
* If not, copies the app folder from the repo
|
||||
* @param {string} appId - App id
|
||||
*/
|
||||
private ensureAppDir = async (appId: string) => {
|
||||
const { rootFolderHost } = getEnv();
|
||||
|
||||
const { appDirPath, repoPath } = this.getAppPaths(appId);
|
||||
const dockerFilePath = path.join(rootFolderHost, 'apps', appId, 'docker-compose.yml');
|
||||
|
||||
if (!(await pathExists(dockerFilePath))) {
|
||||
// delete eventual app folder if exists
|
||||
this.logger.info(`Deleting app ${appId} folder if exists`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||
|
||||
// Copy app folder from repo
|
||||
this.logger.info(`Copying app ${appId} from repo ${getEnv().appsRepoId}`);
|
||||
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Install an app from the repo
|
||||
* @param {string} appId - The id of the app to install
|
||||
* @param {Record<string, unknown>} config - The config of the app
|
||||
*/
|
||||
public installApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
if (process.getuid && process.getgid) {
|
||||
this.logger.info(`Installing app ${appId} as User ID: ${process.getuid()}, Group ID: ${process.getgid()}`);
|
||||
} else {
|
||||
this.logger.info(`Installing app ${appId}. No User ID or Group ID found.`);
|
||||
}
|
||||
|
||||
const { rootFolderHost, appsRepoId } = getEnv();
|
||||
|
||||
const { appDirPath, repoPath, appDataDirPath } = this.getAppPaths(appId);
|
||||
|
||||
// Check if app exists in repo
|
||||
const apps = await fs.promises.readdir(path.join(rootFolderHost, 'repos', appsRepoId, 'apps'));
|
||||
|
||||
if (!apps.includes(appId)) {
|
||||
this.logger.error(`App ${appId} not found in repo ${appsRepoId}`);
|
||||
return { success: false, message: `App ${appId} not found in repo ${appsRepoId}` };
|
||||
}
|
||||
|
||||
// Delete app folder if exists
|
||||
this.logger.info(`Deleting folder ${appDirPath} if exists`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||
|
||||
// Create app folder
|
||||
this.logger.info(`Creating folder ${appDirPath}`);
|
||||
await fs.promises.mkdir(appDirPath, { recursive: true });
|
||||
|
||||
// Copy app folder from repo
|
||||
this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
|
||||
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||
|
||||
// Create folder app-data folder
|
||||
this.logger.info(`Creating folder ${appDataDirPath}`);
|
||||
await fs.promises.mkdir(appDataDirPath, { recursive: true });
|
||||
|
||||
// Create app.env file
|
||||
this.logger.info(`Creating app.env file for app ${appId}`);
|
||||
await generateEnvFile(appId, config);
|
||||
|
||||
// Copy data dir
|
||||
this.logger.info(`Copying data dir for app ${appId}`);
|
||||
if (!(await pathExists(`${appDataDirPath}/data`))) {
|
||||
await copyDataDir(appId);
|
||||
}
|
||||
|
||||
await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
|
||||
this.logger.error(`Error setting permissions for app ${appId}`);
|
||||
});
|
||||
|
||||
// run docker-compose up
|
||||
this.logger.info(`Running docker-compose up for app ${appId}`);
|
||||
await compose(appId, 'up -d');
|
||||
|
||||
this.logger.info(`Docker-compose up for app ${appId} finished`);
|
||||
|
||||
return { success: true, message: `App ${appId} installed successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
private generateJobId = (event: Record<string, unknown>) => {
|
||||
const { appId, action } = event;
|
||||
return `${appId}-${action}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Stops an app
|
||||
* @param {string} appId - The id of the app to stop
|
||||
* @param {Record<string, unknown>} config - The config of the app
|
||||
*/
|
||||
public stopApp = async (appId: string, config: Record<string, unknown>, skipEnvGeneration = false) => {
|
||||
public stopApp = async (appId: string) => {
|
||||
const spinner = new TerminalSpinner(`Stopping app ${appId}`);
|
||||
spinner.start();
|
||||
|
||||
try {
|
||||
spinner.start();
|
||||
this.logger.info(`Stopping app ${appId}`);
|
||||
const jobid = this.generateJobId({ appId, action: 'stop' });
|
||||
|
||||
await this.ensureAppDir(appId);
|
||||
const event = { type: 'app', command: 'stop', appid: appId, form: {} } satisfies SystemEvent;
|
||||
const job = await this.queue.add(jobid, eventSchema.parse(event));
|
||||
const result = await job.waitUntilFinished(this.queueEvents, 1000 * 60 * 5);
|
||||
|
||||
if (!skipEnvGeneration) {
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await generateEnvFile(appId, config);
|
||||
}
|
||||
await compose(appId, 'rm --force --stop');
|
||||
|
||||
this.logger.info(`App ${appId} stopped`);
|
||||
spinner.done(`App ${appId} stopped`);
|
||||
return { success: true, message: `App ${appId} stopped successfully` };
|
||||
} catch (err) {
|
||||
if (!result?.success) {
|
||||
this.logger.error(result?.message);
|
||||
spinner.fail(`Failed to stop app ${appId} see logs for more details (logs/error.log)`);
|
||||
return this.handleAppError(err);
|
||||
} else {
|
||||
spinner.done(`App ${appId} stopped`);
|
||||
}
|
||||
};
|
||||
|
||||
public startApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
public startApp = async (appId: string) => {
|
||||
const spinner = new TerminalSpinner(`Starting app ${appId}`);
|
||||
try {
|
||||
spinner.start();
|
||||
const { appDataDirPath } = this.getAppPaths(appId);
|
||||
spinner.start();
|
||||
|
||||
this.logger.info(`Starting app ${appId}`);
|
||||
const jobid = this.generateJobId({ appId, action: 'start' });
|
||||
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
const event = { type: 'app', command: 'start', appid: appId, form: {} } satisfies SystemEvent;
|
||||
const job = await this.queue.add(jobid, eventSchema.parse(event));
|
||||
const result = await job.waitUntilFinished(this.queueEvents, 1000 * 60 * 5);
|
||||
|
||||
await compose(appId, 'up --detach --force-recreate --remove-orphans --pull always');
|
||||
|
||||
this.logger.info(`App ${appId} started`);
|
||||
|
||||
this.logger.info(`Setting permissions for app ${appId}`);
|
||||
await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
|
||||
this.logger.error(`Error setting permissions for app ${appId}`);
|
||||
});
|
||||
|
||||
spinner.done(`App ${appId} started`);
|
||||
return { success: true, message: `App ${appId} started successfully` };
|
||||
} catch (err) {
|
||||
if (!result.success) {
|
||||
spinner.fail(`Failed to start app ${appId} see logs for more details (logs/error.log)`);
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public uninstallApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
const { appDirPath, appDataDirPath } = this.getAppPaths(appId);
|
||||
this.logger.info(`Uninstalling app ${appId}`);
|
||||
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
await compose(appId, 'down --remove-orphans --volumes --rmi all');
|
||||
|
||||
this.logger.info(`Deleting folder ${appDirPath}`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true }).catch((err) => {
|
||||
this.logger.error(`Error deleting folder ${appDirPath}: ${err.message}`);
|
||||
});
|
||||
|
||||
this.logger.info(`Deleting folder ${appDataDirPath}`);
|
||||
await fs.promises.rm(appDataDirPath, { recursive: true, force: true }).catch((err) => {
|
||||
this.logger.error(`Error deleting folder ${appDataDirPath}: ${err.message}`);
|
||||
});
|
||||
|
||||
this.logger.info(`App ${appId} uninstalled`);
|
||||
return { success: true, message: `App ${appId} uninstalled successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public updateApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
const { appDirPath, repoPath } = this.getAppPaths(appId);
|
||||
this.logger.info(`Updating app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
|
||||
await compose(appId, 'up --detach --force-recreate --remove-orphans');
|
||||
await compose(appId, 'down --rmi all --remove-orphans');
|
||||
|
||||
this.logger.info(`Deleting folder ${appDirPath}`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||
|
||||
this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
|
||||
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||
|
||||
await compose(appId, 'pull');
|
||||
|
||||
return { success: true, message: `App ${appId} updated successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public regenerateAppEnv = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
return { success: true, message: `App ${appId} env file regenerated successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Start all apps with status running
|
||||
*/
|
||||
public startAllApps = async () => {
|
||||
const spinner = new TerminalSpinner('Starting apps...');
|
||||
const client = await getDbClient();
|
||||
|
||||
try {
|
||||
// Get all apps with status running
|
||||
const { rows } = await client.query(`SELECT * FROM app WHERE status = 'running'`);
|
||||
|
||||
// Update all apps with status different than running or stopped to stopped
|
||||
await client.query(`UPDATE app SET status = 'stopped' WHERE status != 'stopped' AND status != 'running' AND status != 'missing'`);
|
||||
|
||||
// Start all apps
|
||||
for (const row of rows) {
|
||||
const { id, config } = row;
|
||||
|
||||
const { success } = await this.startApp(id, config);
|
||||
|
||||
if (!success) {
|
||||
this.logger.error(`Error starting app ${id}`);
|
||||
await client.query(`UPDATE app SET status = 'stopped' WHERE id = '${id}'`);
|
||||
} else {
|
||||
await client.query(`UPDATE app SET status = 'running' WHERE id = '${id}'`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(`Error starting apps: ${err}`);
|
||||
spinner.fail(`Error starting apps see logs for details (logs/error.log)`);
|
||||
} finally {
|
||||
await client.end();
|
||||
} else {
|
||||
spinner.done(`App ${appId} started`);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,3 +1,2 @@
|
|||
export { AppExecutors } from './app/app.executors';
|
||||
export { RepoExecutors } from './repo/repo.executors';
|
||||
export { SystemExecutors } from './system/system.executors';
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
/* eslint-disable no-restricted-syntax */
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { Queue } from 'bullmq';
|
||||
import { Redis } from 'ioredis';
|
||||
import fs from 'fs';
|
||||
import cliProgress from 'cli-progress';
|
||||
import semver from 'semver';
|
||||
|
@ -9,20 +7,14 @@ import axios from 'axios';
|
|||
import boxen from 'boxen';
|
||||
import path from 'path';
|
||||
import { spawn } from 'child_process';
|
||||
import si from 'systeminformation';
|
||||
import { Stream } from 'stream';
|
||||
import dotenv from 'dotenv';
|
||||
import { SystemEvent } from '@runtipi/shared';
|
||||
import chalk from 'chalk';
|
||||
import { killOtherWorkers } from 'src/services/watcher/watcher';
|
||||
import { pathExists } from '@runtipi/shared';
|
||||
import { AppExecutors } from '../app/app.executors';
|
||||
import { copySystemFiles, generateSystemEnvFile, generateTlsCertificates } from './system.helpers';
|
||||
import { copySystemFiles, generateSystemEnvFile } from './system.helpers';
|
||||
import { TerminalSpinner } from '@/utils/logger/terminal-spinner';
|
||||
import { pathExists } from '@/utils/fs-helpers';
|
||||
import { getEnv } from '@/utils/environment/environment';
|
||||
import { fileLogger } from '@/utils/logger/file-logger';
|
||||
import { runPostgresMigrations } from '@/utils/migrations/run-migration';
|
||||
import { getUserIds } from '@/utils/environment/user';
|
||||
import { logger } from '@/utils/logger/logger';
|
||||
import { execAsync } from '@/utils/exec-async/execAsync';
|
||||
|
||||
export class SystemExecutors {
|
||||
|
@ -34,7 +26,7 @@ export class SystemExecutors {
|
|||
|
||||
constructor() {
|
||||
this.rootFolder = process.cwd();
|
||||
this.logger = fileLogger;
|
||||
this.logger = logger;
|
||||
|
||||
this.envFile = path.join(this.rootFolder, '.env');
|
||||
}
|
||||
|
@ -49,58 +41,46 @@ export class SystemExecutors {
|
|||
return { success: false, message: `An error occurred: ${err}` };
|
||||
};
|
||||
|
||||
private getSystemLoad = async () => {
|
||||
const { currentLoad } = await si.currentLoad();
|
||||
const mem = await si.mem();
|
||||
const [disk0] = await si.fsSize();
|
||||
// private ensureFilePermissions = async (rootFolderHost: string) => {
|
||||
// const logger = new TerminalSpinner('');
|
||||
|
||||
return {
|
||||
cpu: { load: currentLoad },
|
||||
memory: { total: mem.total, used: mem.used, available: mem.available },
|
||||
disk: { total: disk0?.size, used: disk0?.used, available: disk0?.available },
|
||||
};
|
||||
};
|
||||
// const filesAndFolders = [
|
||||
// path.join(rootFolderHost, 'apps'),
|
||||
// path.join(rootFolderHost, 'logs'),
|
||||
// path.join(rootFolderHost, 'repos'),
|
||||
// path.join(rootFolderHost, 'state'),
|
||||
// path.join(rootFolderHost, 'traefik'),
|
||||
// path.join(rootFolderHost, '.env'),
|
||||
// path.join(rootFolderHost, 'VERSION'),
|
||||
// path.join(rootFolderHost, 'docker-compose.yml'),
|
||||
// ];
|
||||
|
||||
private ensureFilePermissions = async (rootFolderHost: string) => {
|
||||
const logger = new TerminalSpinner('');
|
||||
// const files600 = [path.join(rootFolderHost, 'traefik', 'shared', 'acme.json')];
|
||||
|
||||
const filesAndFolders = [
|
||||
path.join(rootFolderHost, 'apps'),
|
||||
path.join(rootFolderHost, 'logs'),
|
||||
path.join(rootFolderHost, 'repos'),
|
||||
path.join(rootFolderHost, 'state'),
|
||||
path.join(rootFolderHost, 'traefik'),
|
||||
path.join(rootFolderHost, '.env'),
|
||||
path.join(rootFolderHost, 'VERSION'),
|
||||
path.join(rootFolderHost, 'docker-compose.yml'),
|
||||
];
|
||||
// this.logger.info('Setting file permissions a+rwx on required files');
|
||||
// // Give permission to read and write to all files and folders for the current user
|
||||
// for (const fileOrFolder of filesAndFolders) {
|
||||
// if (await pathExists(fileOrFolder)) {
|
||||
// this.logger.info(`Setting permissions on ${fileOrFolder}`);
|
||||
// await execAsync(`chmod -R a+rwx ${fileOrFolder}`).catch(() => {
|
||||
// logger.fail(`Failed to set permissions on ${fileOrFolder}`);
|
||||
// });
|
||||
// this.logger.info(`Successfully set permissions on ${fileOrFolder}`);
|
||||
// }
|
||||
// }
|
||||
|
||||
const files600 = [path.join(rootFolderHost, 'traefik', 'shared', 'acme.json')];
|
||||
// this.logger.info('Setting file permissions 600 on required files');
|
||||
|
||||
this.logger.info('Setting file permissions a+rwx on required files');
|
||||
// Give permission to read and write to all files and folders for the current user
|
||||
for (const fileOrFolder of filesAndFolders) {
|
||||
if (await pathExists(fileOrFolder)) {
|
||||
this.logger.info(`Setting permissions on ${fileOrFolder}`);
|
||||
await execAsync(`chmod -R a+rwx ${fileOrFolder}`).catch(() => {
|
||||
logger.fail(`Failed to set permissions on ${fileOrFolder}`);
|
||||
});
|
||||
this.logger.info(`Successfully set permissions on ${fileOrFolder}`);
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.info('Setting file permissions 600 on required files');
|
||||
|
||||
for (const fileOrFolder of files600) {
|
||||
if (await pathExists(fileOrFolder)) {
|
||||
this.logger.info(`Setting permissions on ${fileOrFolder}`);
|
||||
await execAsync(`chmod 600 ${fileOrFolder}`).catch(() => {
|
||||
logger.fail(`Failed to set permissions on ${fileOrFolder}`);
|
||||
});
|
||||
this.logger.info(`Successfully set permissions on ${fileOrFolder}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
// for (const fileOrFolder of files600) {
|
||||
// if (await pathExists(fileOrFolder)) {
|
||||
// this.logger.info(`Setting permissions on ${fileOrFolder}`);
|
||||
// await execAsync(`chmod 600 ${fileOrFolder}`).catch(() => {
|
||||
// logger.fail(`Failed to set permissions on ${fileOrFolder}`);
|
||||
// });
|
||||
// this.logger.info(`Successfully set permissions on ${fileOrFolder}`);
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
|
||||
public cleanLogs = async () => {
|
||||
try {
|
||||
|
@ -113,20 +93,6 @@ export class SystemExecutors {
|
|||
}
|
||||
};
|
||||
|
||||
public systemInfo = async () => {
|
||||
try {
|
||||
const { rootFolderHost } = getEnv();
|
||||
const systemLoad = await this.getSystemLoad();
|
||||
|
||||
await fs.promises.writeFile(path.join(rootFolderHost, 'state', 'system-info.json'), JSON.stringify(systemLoad, null, 2));
|
||||
await fs.promises.chmod(path.join(rootFolderHost, 'state', 'system-info.json'), 0o777);
|
||||
|
||||
return { success: true, message: '' };
|
||||
} catch (e) {
|
||||
return this.handleSystemError(e);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* This method will stop Tipi
|
||||
* It will stop all the apps and then stop the main containers.
|
||||
|
@ -141,7 +107,10 @@ export class SystemExecutors {
|
|||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const app of apps) {
|
||||
await appExecutor.stopApp(app, {}, true);
|
||||
spinner.setMessage(`Stopping ${app}...`);
|
||||
spinner.start();
|
||||
await appExecutor.stopApp(app);
|
||||
spinner.done(`${app} stopped`);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -164,42 +133,11 @@ export class SystemExecutors {
|
|||
* This method will start Tipi.
|
||||
* It will copy the system files, generate the system env file, pull the images and start the containers.
|
||||
*/
|
||||
public start = async (sudo = true, killWatchers = true) => {
|
||||
public start = async () => {
|
||||
const spinner = new TerminalSpinner('Starting Tipi...');
|
||||
try {
|
||||
await this.logger.flush();
|
||||
|
||||
const { isSudo } = getUserIds();
|
||||
|
||||
if (!sudo) {
|
||||
console.log(
|
||||
boxen(
|
||||
"You are running in sudoless mode. While Tipi should work as expected, you'll probably run into permission issues and will have to manually fix them. We recommend running Tipi with sudo for beginners.",
|
||||
{
|
||||
title: '⛔️Sudoless mode',
|
||||
titleAlignment: 'center',
|
||||
textAlignment: 'center',
|
||||
padding: 1,
|
||||
borderStyle: 'double',
|
||||
borderColor: 'red',
|
||||
margin: { top: 1, bottom: 1 },
|
||||
width: 80,
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
this.logger.info('Killing other workers...');
|
||||
|
||||
if (killWatchers) {
|
||||
await killOtherWorkers();
|
||||
}
|
||||
|
||||
if (!isSudo && sudo) {
|
||||
console.log(chalk.red('Tipi needs to run as root to start. Use sudo ./runtipi-cli start'));
|
||||
throw new Error('Tipi needs to run as root to start. Use sudo ./runtipi-cli start');
|
||||
}
|
||||
|
||||
spinner.setMessage('Copying system files...');
|
||||
spinner.start();
|
||||
|
||||
|
@ -208,10 +146,6 @@ export class SystemExecutors {
|
|||
|
||||
spinner.done('System files copied');
|
||||
|
||||
if (sudo) {
|
||||
await this.ensureFilePermissions(this.rootFolder);
|
||||
}
|
||||
|
||||
spinner.setMessage('Generating system env file...');
|
||||
spinner.start();
|
||||
this.logger.info('Generating system env file...');
|
||||
|
@ -238,66 +172,6 @@ export class SystemExecutors {
|
|||
await execAsync(`docker compose --env-file ${this.envFile} up --detach --remove-orphans --build`);
|
||||
spinner.done('Containers started');
|
||||
|
||||
// start watcher cli in the background
|
||||
spinner.setMessage('Starting watcher...');
|
||||
spinner.start();
|
||||
|
||||
this.logger.info('Generating TLS certificates...');
|
||||
await generateTlsCertificates({ domain: envMap.get('LOCAL_DOMAIN') });
|
||||
|
||||
if (killWatchers) {
|
||||
this.logger.info('Starting watcher...');
|
||||
const subprocess = spawn('./runtipi-cli', [process.argv[1] as string, 'watch'], { cwd: this.rootFolder, detached: true, stdio: ['ignore', 'ignore', 'ignore'] });
|
||||
subprocess.unref();
|
||||
}
|
||||
|
||||
spinner.done('Watcher started');
|
||||
|
||||
// Flush redis cache
|
||||
this.logger.info('Flushing redis cache...');
|
||||
const cache = new Redis({ host: '127.0.0.1', port: 6379, password: envMap.get('REDIS_PASSWORD'), lazyConnect: true });
|
||||
await cache.connect();
|
||||
await cache.flushdb();
|
||||
await cache.quit();
|
||||
|
||||
this.logger.info('Starting queue...');
|
||||
const queue = new Queue('events', { connection: { host: '127.0.0.1', port: 6379, password: envMap.get('REDIS_PASSWORD') } });
|
||||
this.logger.info('Obliterating queue...');
|
||||
await queue.obliterate({ force: true });
|
||||
|
||||
// Initial jobs
|
||||
this.logger.info('Adding initial jobs to queue...');
|
||||
await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent);
|
||||
await queue.add(`${Math.random().toString()}_repo_clone`, { type: 'repo', command: 'clone', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
|
||||
await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
|
||||
|
||||
// Scheduled jobs
|
||||
this.logger.info('Adding scheduled jobs to queue...');
|
||||
await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent, { repeat: { pattern: '*/30 * * * *' } });
|
||||
await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent, { repeat: { pattern: '* * * * *' } });
|
||||
|
||||
this.logger.info('Closing queue...');
|
||||
await queue.close();
|
||||
|
||||
spinner.setMessage('Running database migrations...');
|
||||
spinner.start();
|
||||
|
||||
this.logger.info('Running database migrations...');
|
||||
await runPostgresMigrations({
|
||||
postgresHost: '127.0.0.1',
|
||||
postgresDatabase: envMap.get('POSTGRES_DBNAME') as string,
|
||||
postgresUsername: envMap.get('POSTGRES_USERNAME') as string,
|
||||
postgresPassword: envMap.get('POSTGRES_PASSWORD') as string,
|
||||
postgresPort: envMap.get('POSTGRES_PORT') as string,
|
||||
});
|
||||
|
||||
spinner.done('Database migrations complete');
|
||||
|
||||
// Start all apps
|
||||
const appExecutor = new AppExecutors();
|
||||
this.logger.info('Starting all apps...');
|
||||
await appExecutor.startAllApps();
|
||||
|
||||
console.log(
|
||||
boxen(
|
||||
`Visit: http://${envMap.get('INTERNAL_IP')}:${envMap.get(
|
||||
|
@ -329,7 +203,7 @@ export class SystemExecutors {
|
|||
public restart = async () => {
|
||||
try {
|
||||
await this.stop();
|
||||
await this.start(true, false);
|
||||
await this.start();
|
||||
return { success: true, message: '' };
|
||||
} catch (e) {
|
||||
return this.handleSystemError(e);
|
||||
|
|
|
@ -2,12 +2,8 @@ import crypto from 'crypto';
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { envMapToString, envStringToMap, settingsSchema } from '@runtipi/shared';
|
||||
import chalk from 'chalk';
|
||||
import { pathExists } from '@/utils/fs-helpers';
|
||||
import { getRepoHash } from '../repo/repo.helpers';
|
||||
import { fileLogger } from '@/utils/logger/file-logger';
|
||||
import { execAsync } from '@/utils/exec-async/execAsync';
|
||||
import { envMapToString, envStringToMap, pathExists, settingsSchema } from '@runtipi/shared';
|
||||
import { logger } from '@/utils/logger/logger';
|
||||
|
||||
type EnvKeys =
|
||||
| 'APPS_REPO_ID'
|
||||
|
@ -38,9 +34,6 @@ type EnvKeys =
|
|||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
| (string & {});
|
||||
|
||||
const OLD_DEFAULT_REPO_URL = 'https://github.com/meienberger/runtipi-appstore';
|
||||
const DEFAULT_REPO_URL = 'https://github.com/runtipi/runtipi-appstore';
|
||||
|
||||
/**
|
||||
* Reads and returns the generated seed
|
||||
*/
|
||||
|
@ -147,173 +140,41 @@ export const generateSystemEnvFile = async () => {
|
|||
|
||||
const { data } = settings;
|
||||
|
||||
if (data.appsRepoUrl === OLD_DEFAULT_REPO_URL) {
|
||||
data.appsRepoUrl = DEFAULT_REPO_URL;
|
||||
}
|
||||
|
||||
const jwtSecret = envMap.get('JWT_SECRET') || (await deriveEntropy('jwt_secret'));
|
||||
const repoId = getRepoHash(data.appsRepoUrl || DEFAULT_REPO_URL);
|
||||
const postgresPassword = envMap.get('POSTGRES_PASSWORD') || (await deriveEntropy('postgres_password'));
|
||||
const redisPassword = envMap.get('REDIS_PASSWORD') || (await deriveEntropy('redis_password'));
|
||||
|
||||
const version = await fs.promises.readFile(path.join(rootFolder, 'VERSION'), 'utf-8');
|
||||
|
||||
envMap.set('APPS_REPO_ID', repoId);
|
||||
envMap.set('APPS_REPO_URL', data.appsRepoUrl || DEFAULT_REPO_URL);
|
||||
envMap.set('TZ', Intl.DateTimeFormat().resolvedOptions().timeZone);
|
||||
envMap.set('INTERNAL_IP', data.listenIp || getInternalIp());
|
||||
envMap.set('DNS_IP', data.dnsIp || '9.9.9.9');
|
||||
envMap.set('ARCHITECTURE', getArchitecture());
|
||||
envMap.set('TIPI_VERSION', version);
|
||||
envMap.set('JWT_SECRET', jwtSecret);
|
||||
envMap.set('ROOT_FOLDER_HOST', rootFolder);
|
||||
envMap.set('NGINX_PORT', String(data.port || 80));
|
||||
envMap.set('NGINX_PORT_SSL', String(data.sslPort || 443));
|
||||
envMap.set('DOMAIN', data.domain || 'example.com');
|
||||
envMap.set('STORAGE_PATH', data.storagePath || rootFolder);
|
||||
envMap.set('POSTGRES_HOST', 'tipi-db');
|
||||
envMap.set('POSTGRES_DBNAME', 'tipi');
|
||||
envMap.set('POSTGRES_USERNAME', 'tipi');
|
||||
envMap.set('POSTGRES_PASSWORD', postgresPassword);
|
||||
envMap.set('POSTGRES_PORT', String(data.postgresPort || 5432));
|
||||
envMap.set('REDIS_HOST', 'tipi-redis');
|
||||
envMap.set('REDIS_PASSWORD', redisPassword);
|
||||
envMap.set('DEMO_MODE', String(data.demoMode || 'false'));
|
||||
envMap.set('GUEST_DASHBOARD', String(data.guestDashboard || 'false'));
|
||||
envMap.set('LOCAL_DOMAIN', data.localDomain || 'tipi.lan');
|
||||
envMap.set('NODE_ENV', 'production');
|
||||
|
||||
const currentUserGroup = process.getgid ? String(process.getgid()) : '1000';
|
||||
const currentUserId = process.getuid ? String(process.getuid()) : '1000';
|
||||
|
||||
envMap.set('TIPI_GID', currentUserGroup);
|
||||
envMap.set('TIPI_UID', currentUserId);
|
||||
|
||||
await fs.promises.writeFile(envFilePath, envMapToString(envMap));
|
||||
|
||||
return envMap;
|
||||
};
|
||||
|
||||
/**
|
||||
* Sets the value of an environment variable in the .env file
|
||||
*
|
||||
* @param {string} key - The key of the environment variable
|
||||
* @param {string} value - The value of the environment variable
|
||||
*/
|
||||
export const setEnvVariable = async (key: EnvKeys, value: string) => {
|
||||
const rootFolder = process.cwd();
|
||||
|
||||
const envFilePath = path.join(rootFolder, '.env');
|
||||
|
||||
if (!(await pathExists(envFilePath))) {
|
||||
await fs.promises.writeFile(envFilePath, '');
|
||||
}
|
||||
|
||||
const envFile = await fs.promises.readFile(envFilePath, 'utf-8');
|
||||
const envMap: Map<EnvKeys, string> = envStringToMap(envFile);
|
||||
|
||||
envMap.set(key, value);
|
||||
|
||||
await fs.promises.writeFile(envFilePath, envMapToString(envMap));
|
||||
};
|
||||
|
||||
/**
|
||||
* Copies the system files from the assets folder to the current working directory
|
||||
*/
|
||||
export const copySystemFiles = async () => {
|
||||
// Remove old unused files
|
||||
if (await pathExists(path.join(process.cwd(), 'scripts'))) {
|
||||
fileLogger.info('Removing old scripts folder');
|
||||
await fs.promises.rmdir(path.join(process.cwd(), 'scripts'), { recursive: true });
|
||||
}
|
||||
|
||||
const assetsFolder = path.join('/snapshot', 'runtipi', 'packages', 'cli', 'assets');
|
||||
|
||||
// Copy docker-compose.yml file
|
||||
fileLogger.info('Copying file docker-compose.yml');
|
||||
logger.info('Copying file docker-compose.yml');
|
||||
await fs.promises.copyFile(path.join(assetsFolder, 'docker-compose.yml'), path.join(process.cwd(), 'docker-compose.yml'));
|
||||
|
||||
// Copy VERSION file
|
||||
fileLogger.info('Copying file VERSION');
|
||||
logger.info('Copying file VERSION');
|
||||
await fs.promises.copyFile(path.join(assetsFolder, 'VERSION'), path.join(process.cwd(), 'VERSION'));
|
||||
|
||||
// Copy traefik folder from assets
|
||||
fileLogger.info('Creating traefik folders');
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'traefik', 'dynamic'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'traefik', 'shared'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'traefik', 'tls'), { recursive: true });
|
||||
|
||||
fileLogger.info('Copying traefik files');
|
||||
await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'traefik.yml'), path.join(process.cwd(), 'traefik', 'traefik.yml'));
|
||||
await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'dynamic', 'dynamic.yml'), path.join(process.cwd(), 'traefik', 'dynamic', 'dynamic.yml'));
|
||||
|
||||
// Create base folders
|
||||
fileLogger.info('Creating base folders');
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'apps'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'app-data'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'state'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'repos'), { recursive: true });
|
||||
|
||||
// Create media folders
|
||||
fileLogger.info('Creating media folders');
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'torrents', 'watch'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'torrents', 'complete'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'torrents', 'incomplete'), { recursive: true });
|
||||
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'usenet', 'watch'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'usenet', 'complete'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'usenet', 'incomplete'), { recursive: true });
|
||||
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'downloads', 'watch'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'downloads', 'complete'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'downloads', 'incomplete'), { recursive: true });
|
||||
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'books'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'comics'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'movies'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'music'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'tv'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'podcasts'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'images'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'roms'), { recursive: true });
|
||||
};
|
||||
|
||||
/**
|
||||
* Given a domain, generates the TLS certificates for it to be used with Traefik
|
||||
*
|
||||
* @param {string} data.domain The domain to generate the certificates for
|
||||
*/
|
||||
export const generateTlsCertificates = async (data: { domain?: string }) => {
|
||||
if (!data.domain) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the certificate already exists, don't generate it again
|
||||
if (await pathExists(path.join(process.cwd(), 'traefik', 'tls', `${data.domain}.txt`))) {
|
||||
fileLogger.info(`TLS certificate for ${data.domain} already exists`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Remove old certificates
|
||||
if (await pathExists(path.join(process.cwd(), 'traefik', 'tls', 'cert.pem'))) {
|
||||
fileLogger.info('Removing old TLS certificate');
|
||||
await fs.promises.unlink(path.join(process.cwd(), 'traefik', 'tls', 'cert.pem'));
|
||||
}
|
||||
if (await pathExists(path.join(process.cwd(), 'traefik', 'tls', 'key.pem'))) {
|
||||
fileLogger.info('Removing old TLS key');
|
||||
await fs.promises.unlink(path.join(process.cwd(), 'traefik', 'tls', 'key.pem'));
|
||||
}
|
||||
|
||||
const subject = `/O=runtipi.io/OU=IT/CN=*.${data.domain}/emailAddress=webmaster@${data.domain}`;
|
||||
const subjectAltName = `DNS:*.${data.domain},DNS:${data.domain}`;
|
||||
|
||||
try {
|
||||
fileLogger.info(`Generating TLS certificate for ${data.domain}`);
|
||||
await execAsync(`openssl req -x509 -newkey rsa:4096 -keyout traefik/tls/key.pem -out traefik/tls/cert.pem -days 365 -subj "${subject}" -addext "subjectAltName = ${subjectAltName}" -nodes`);
|
||||
fileLogger.info(`Writing txt file for ${data.domain}`);
|
||||
await fs.promises.writeFile(path.join(process.cwd(), 'traefik', 'tls', `${data.domain}.txt`), '');
|
||||
} catch (error) {
|
||||
fileLogger.error(error);
|
||||
console.error(chalk.red('✗'), 'Failed to generate TLS certificates');
|
||||
}
|
||||
};
|
||||
|
|
|
@ -3,7 +3,6 @@ import { program } from 'commander';
|
|||
|
||||
import chalk from 'chalk';
|
||||
import { description, version } from '../package.json';
|
||||
import { startWorker } from './services/watcher/watcher';
|
||||
import { AppExecutors, SystemExecutors } from './executors';
|
||||
|
||||
const main = async () => {
|
||||
|
@ -11,22 +10,13 @@ const main = async () => {
|
|||
|
||||
program.name('./runtipi-cli').usage('<command> [options]');
|
||||
|
||||
program
|
||||
.command('watch')
|
||||
.description('Watcher script for events queue')
|
||||
.action(async () => {
|
||||
console.log('Starting watcher');
|
||||
startWorker();
|
||||
});
|
||||
|
||||
program
|
||||
.command('start')
|
||||
.description('Start tipi')
|
||||
.addHelpText('after', '\nExample call: sudo ./runtipi-cli start')
|
||||
.option('--no-sudo', 'Skip sudo usage')
|
||||
.action(async (options) => {
|
||||
.action(async () => {
|
||||
const systemExecutors = new SystemExecutors();
|
||||
await systemExecutors.start(options.sudo);
|
||||
await systemExecutors.start();
|
||||
});
|
||||
|
||||
program
|
||||
|
@ -81,10 +71,10 @@ const main = async () => {
|
|||
const appExecutors = new AppExecutors();
|
||||
switch (command) {
|
||||
case 'start':
|
||||
await appExecutors.startApp(app, {});
|
||||
await appExecutors.startApp(app);
|
||||
break;
|
||||
case 'stop':
|
||||
await appExecutors.stopApp(app, {}, true);
|
||||
await appExecutors.stopApp(app);
|
||||
break;
|
||||
default:
|
||||
console.log(chalk.red('✗'), 'Unknown command');
|
||||
|
|
|
@ -1,12 +0,0 @@
|
|||
/**
|
||||
* Returns the user id and group id of the current user
|
||||
*/
|
||||
export const getUserIds = () => {
|
||||
if (process.getgid && process.getuid) {
|
||||
const isSudo = process.getgid() === 0 && process.getuid() === 0;
|
||||
|
||||
return { uid: process.getuid(), gid: process.getgid(), isSudo };
|
||||
}
|
||||
|
||||
return { uid: 1000, gid: 1000, isSudo: false };
|
||||
};
|
|
@ -1,8 +0,0 @@
|
|||
import fs from 'fs';
|
||||
|
||||
export const pathExists = async (path: string): Promise<boolean> => {
|
||||
return fs.promises
|
||||
.access(path)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
};
|
|
@ -1 +0,0 @@
|
|||
export * from './fs-helpers';
|
|
@ -1,58 +0,0 @@
|
|||
import fs from 'fs';
|
||||
import { createLogger } from '@runtipi/shared';
|
||||
import path from 'path';
|
||||
|
||||
function streamLogToHistory(logsFolder: string, logFile: string) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const appLogReadStream = fs.createReadStream(path.join(logsFolder, logFile), 'utf-8');
|
||||
const appLogHistoryWriteStream = fs.createWriteStream(path.join(logsFolder, `${logFile}.history`), { flags: 'a' });
|
||||
|
||||
appLogReadStream
|
||||
.pipe(appLogHistoryWriteStream)
|
||||
.on('finish', () => {
|
||||
fs.writeFileSync(path.join(logsFolder, logFile), '');
|
||||
resolve(true);
|
||||
})
|
||||
.on('error', (error) => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
class FileLogger {
|
||||
private winstonLogger = createLogger('cli', path.join(process.cwd(), 'logs'));
|
||||
|
||||
private logsFolder = path.join(process.cwd(), 'logs');
|
||||
|
||||
public flush = async () => {
|
||||
try {
|
||||
if (fs.existsSync(path.join(this.logsFolder, 'app.log'))) {
|
||||
await streamLogToHistory(this.logsFolder, 'app.log');
|
||||
}
|
||||
if (fs.existsSync(path.join(this.logsFolder, 'error.log'))) {
|
||||
await streamLogToHistory(this.logsFolder, 'error.log');
|
||||
}
|
||||
this.winstonLogger.info('Logs flushed');
|
||||
} catch (error) {
|
||||
this.winstonLogger.error('Error flushing logs', error);
|
||||
}
|
||||
};
|
||||
|
||||
public error = (...message: unknown[]) => {
|
||||
this.winstonLogger.error(message.join(' '));
|
||||
};
|
||||
|
||||
public info = (...message: unknown[]) => {
|
||||
this.winstonLogger.info(message.join(' '));
|
||||
};
|
||||
|
||||
public warn = (...message: unknown[]) => {
|
||||
this.winstonLogger.warn(message.join(' '));
|
||||
};
|
||||
|
||||
public debug = (...message: unknown[]) => {
|
||||
this.winstonLogger.debug(message.join(' '));
|
||||
};
|
||||
}
|
||||
|
||||
export const fileLogger = new FileLogger();
|
4
packages/cli/src/utils/logger/logger.ts
Normal file
4
packages/cli/src/utils/logger/logger.ts
Normal file
|
@ -0,0 +1,4 @@
|
|||
import { FileLogger } from '@runtipi/shared';
|
||||
import path from 'node:path';
|
||||
|
||||
export const logger = new FileLogger('cli', path.join(process.cwd(), 'logs'));
|
|
@ -4,7 +4,7 @@ api:
|
|||
|
||||
providers:
|
||||
docker:
|
||||
endpoint: "unix:///var/run/docker.sock"
|
||||
endpoint: 'unix:///var/run/docker.sock'
|
||||
watch: true
|
||||
exposedByDefault: false
|
||||
file:
|
||||
|
@ -13,9 +13,9 @@ providers:
|
|||
|
||||
entryPoints:
|
||||
web:
|
||||
address: ":80"
|
||||
address: ':80'
|
||||
websecure:
|
||||
address: ":443"
|
||||
address: ':443'
|
||||
http:
|
||||
tls:
|
||||
certResolver: myresolver
|
||||
|
@ -23,7 +23,7 @@ entryPoints:
|
|||
certificatesResolvers:
|
||||
myresolver:
|
||||
acme:
|
||||
email: acme@thisprops.com
|
||||
email: acme@thisprops.com
|
||||
storage: /shared/acme.json
|
||||
httpChallenge:
|
||||
entryPoint: web
|
2
packages/worker/src/config/constants.ts
Normal file
2
packages/worker/src/config/constants.ts
Normal file
|
@ -0,0 +1,2 @@
|
|||
export const ROOT_FOLDER = '/app';
|
||||
export const STORAGE_FOLDER = '/storage';
|
1
packages/worker/src/config/index.ts
Normal file
1
packages/worker/src/config/index.ts
Normal file
|
@ -0,0 +1 @@
|
|||
export * from './constants';
|
90
packages/worker/src/index.ts
Normal file
90
packages/worker/src/index.ts
Normal file
|
@ -0,0 +1,90 @@
|
|||
import { SystemEvent } from '@runtipi/shared';
|
||||
import http from 'node:http';
|
||||
import path from 'node:path';
|
||||
import Redis from 'ioredis';
|
||||
import dotenv from 'dotenv';
|
||||
import { Queue } from 'bullmq';
|
||||
import { copySystemFiles, generateSystemEnvFile, generateTlsCertificates } from '@/lib/system';
|
||||
import { runPostgresMigrations } from '@/lib/migrations';
|
||||
import { startWorker } from './watcher/watcher';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { AppExecutors } from './services';
|
||||
|
||||
const rootFolder = '/app';
|
||||
const envFile = path.join(rootFolder, '.env');
|
||||
|
||||
const main = async () => {
|
||||
try {
|
||||
await logger.flush();
|
||||
|
||||
logger.info('Copying system files...');
|
||||
await copySystemFiles();
|
||||
|
||||
logger.info('Generating system env file...');
|
||||
const envMap = await generateSystemEnvFile();
|
||||
|
||||
// Reload env variables after generating the env file
|
||||
logger.info('Reloading env variables...');
|
||||
dotenv.config({ path: envFile, override: true });
|
||||
|
||||
logger.info('Generating TLS certificates...');
|
||||
await generateTlsCertificates({ domain: envMap.get('LOCAL_DOMAIN') });
|
||||
|
||||
logger.info('Starting queue...');
|
||||
const queue = new Queue('events', { connection: { host: envMap.get('REDIS_HOST'), port: 6379, password: envMap.get('REDIS_PASSWORD') } });
|
||||
logger.info('Obliterating queue...');
|
||||
await queue.obliterate({ force: true });
|
||||
|
||||
// Initial jobs
|
||||
logger.info('Adding initial jobs to queue...');
|
||||
await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent);
|
||||
await queue.add(`${Math.random().toString()}_repo_clone`, { type: 'repo', command: 'clone', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
|
||||
await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
|
||||
|
||||
// Scheduled jobs
|
||||
logger.info('Adding scheduled jobs to queue...');
|
||||
await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent, { repeat: { pattern: '*/30 * * * *' } });
|
||||
await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent, { repeat: { pattern: '* * * * *' } });
|
||||
|
||||
logger.info('Closing queue...');
|
||||
await queue.close();
|
||||
|
||||
logger.info('Running database migrations...');
|
||||
await runPostgresMigrations({
|
||||
postgresHost: envMap.get('POSTGRES_HOST') as string,
|
||||
postgresDatabase: envMap.get('POSTGRES_DBNAME') as string,
|
||||
postgresUsername: envMap.get('POSTGRES_USERNAME') as string,
|
||||
postgresPassword: envMap.get('POSTGRES_PASSWORD') as string,
|
||||
postgresPort: envMap.get('POSTGRES_PORT') as string,
|
||||
});
|
||||
|
||||
// Set status to running
|
||||
logger.info('Setting status to running...');
|
||||
const cache = new Redis({ host: envMap.get('REDIS_HOST'), port: 6379, password: envMap.get('REDIS_PASSWORD') });
|
||||
await cache.set('status', 'RUNNING');
|
||||
await cache.quit();
|
||||
|
||||
// Start all apps
|
||||
const appExecutor = new AppExecutors();
|
||||
logger.info('Starting all apps...');
|
||||
await appExecutor.startAllApps();
|
||||
|
||||
const server = http.createServer((req, res) => {
|
||||
if (req.url === '/healthcheck') {
|
||||
res.writeHead(200);
|
||||
res.end('OK');
|
||||
} else {
|
||||
res.writeHead(404);
|
||||
res.end('Not Found');
|
||||
}
|
||||
});
|
||||
|
||||
server.listen(3000, () => {
|
||||
startWorker();
|
||||
});
|
||||
} catch (e) {
|
||||
console.log('YO', e);
|
||||
}
|
||||
};
|
||||
|
||||
main();
|
|
@ -1,11 +1,11 @@
|
|||
import path from 'path';
|
||||
import { getEnv } from '../environment/environment';
|
||||
import { pathExists } from '../fs-helpers/fs-helpers';
|
||||
import { fileLogger } from '../logger/file-logger';
|
||||
import { execAsync } from '../exec-async/execAsync';
|
||||
import { execAsync, pathExists } from '@runtipi/shared';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getEnv } from '@/lib/environment';
|
||||
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||
|
||||
const composeUp = async (args: string[]) => {
|
||||
fileLogger.info(`Running docker compose with args ${args.join(' ')}`);
|
||||
logger.info(`Running docker compose with args ${args.join(' ')}`);
|
||||
const { stdout, stderr } = await execAsync(`docker compose ${args.join(' ')}`);
|
||||
|
||||
return { stdout, stderr };
|
||||
|
@ -17,14 +17,14 @@ const composeUp = async (args: string[]) => {
|
|||
* @param {string} command - Command to execute
|
||||
*/
|
||||
export const compose = async (appId: string, command: string) => {
|
||||
const { arch, rootFolderHost, appsRepoId, storagePath } = getEnv();
|
||||
const appDataDirPath = path.join(storagePath, 'app-data', appId);
|
||||
const appDirPath = path.join(rootFolderHost, 'apps', appId);
|
||||
const { arch, appsRepoId } = getEnv();
|
||||
const appDataDirPath = path.join(STORAGE_FOLDER, 'app-data', appId);
|
||||
const appDirPath = path.join(ROOT_FOLDER, 'apps', appId);
|
||||
|
||||
const args: string[] = [`--env-file ${path.join(appDataDirPath, 'app.env')}`];
|
||||
|
||||
// User custom env file
|
||||
const userEnvFile = path.join(rootFolderHost, 'user-config', appId, 'app.env');
|
||||
const userEnvFile = path.join(ROOT_FOLDER, 'user-config', appId, 'app.env');
|
||||
if (await pathExists(userEnvFile)) {
|
||||
args.push(`--env-file ${userEnvFile}`);
|
||||
}
|
||||
|
@ -37,11 +37,11 @@ export const compose = async (appId: string, command: string) => {
|
|||
}
|
||||
args.push(`-f ${composeFile}`);
|
||||
|
||||
const commonComposeFile = path.join(rootFolderHost, 'repos', appsRepoId, 'apps', 'docker-compose.common.yml');
|
||||
const commonComposeFile = path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', 'docker-compose.common.yml');
|
||||
args.push(`-f ${commonComposeFile}`);
|
||||
|
||||
// User defined overrides
|
||||
const userComposeFile = path.join(rootFolderHost, 'user-config', appId, 'docker-compose.yml');
|
||||
const userComposeFile = path.join(ROOT_FOLDER, 'user-config', appId, 'docker-compose.yml');
|
||||
if (await pathExists(userComposeFile)) {
|
||||
args.push(`--file ${userComposeFile}`);
|
||||
}
|
62
packages/worker/src/lib/environment/environment.ts
Normal file
62
packages/worker/src/lib/environment/environment.ts
Normal file
|
@ -0,0 +1,62 @@
|
|||
import { z } from 'zod';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
dotenv.config({ path: '.env.dev', override: true });
|
||||
} else {
|
||||
dotenv.config({ override: true });
|
||||
}
|
||||
|
||||
const environmentSchema = z
|
||||
.object({
|
||||
STORAGE_PATH: z.string(),
|
||||
ROOT_FOLDER_HOST: z.string(),
|
||||
APPS_REPO_ID: z.string(),
|
||||
ARCHITECTURE: z.enum(['arm64', 'amd64']),
|
||||
INTERNAL_IP: z.string().ip().or(z.literal('localhost')),
|
||||
TIPI_VERSION: z.string(),
|
||||
REDIS_PASSWORD: z.string(),
|
||||
REDIS_HOST: z.string(),
|
||||
POSTGRES_PORT: z.string(),
|
||||
POSTGRES_USERNAME: z.string(),
|
||||
POSTGRES_PASSWORD: z.string(),
|
||||
POSTGRES_DBNAME: z.string(),
|
||||
POSTGRES_HOST: z.string(),
|
||||
})
|
||||
.transform((env) => {
|
||||
const {
|
||||
STORAGE_PATH = '/app',
|
||||
ARCHITECTURE,
|
||||
ROOT_FOLDER_HOST,
|
||||
APPS_REPO_ID,
|
||||
INTERNAL_IP,
|
||||
TIPI_VERSION,
|
||||
REDIS_PASSWORD,
|
||||
REDIS_HOST,
|
||||
POSTGRES_DBNAME,
|
||||
POSTGRES_PASSWORD,
|
||||
POSTGRES_USERNAME,
|
||||
POSTGRES_PORT,
|
||||
POSTGRES_HOST,
|
||||
...rest
|
||||
} = env;
|
||||
|
||||
return {
|
||||
storagePath: STORAGE_PATH,
|
||||
rootFolderHost: ROOT_FOLDER_HOST,
|
||||
appsRepoId: APPS_REPO_ID,
|
||||
arch: ARCHITECTURE,
|
||||
tipiVersion: TIPI_VERSION,
|
||||
internalIp: INTERNAL_IP,
|
||||
redisPassword: REDIS_PASSWORD,
|
||||
redisHost: REDIS_HOST,
|
||||
postgresPort: POSTGRES_PORT,
|
||||
postgresUsername: POSTGRES_USERNAME,
|
||||
postgresPassword: POSTGRES_PASSWORD,
|
||||
postgresDatabase: POSTGRES_DBNAME,
|
||||
postgresHost: POSTGRES_HOST,
|
||||
...rest,
|
||||
};
|
||||
});
|
||||
|
||||
export const getEnv = () => environmentSchema.parse(process.env);
|
1
packages/worker/src/lib/environment/index.ts
Normal file
1
packages/worker/src/lib/environment/index.ts
Normal file
|
@ -0,0 +1 @@
|
|||
export { getEnv } from './environment';
|
1
packages/worker/src/lib/logger/index.ts
Normal file
1
packages/worker/src/lib/logger/index.ts
Normal file
|
@ -0,0 +1 @@
|
|||
export { logger } from './logger';
|
4
packages/worker/src/lib/logger/logger.ts
Normal file
4
packages/worker/src/lib/logger/logger.ts
Normal file
|
@ -0,0 +1,4 @@
|
|||
import { FileLogger } from '@runtipi/shared';
|
||||
import path from 'node:path';
|
||||
|
||||
export const logger = new FileLogger('worker', path.join('/app', 'logs'), true);
|
1
packages/worker/src/lib/migrations/index.ts
Normal file
1
packages/worker/src/lib/migrations/index.ts
Normal file
|
@ -0,0 +1 @@
|
|||
export { runPostgresMigrations } from './run-migration';
|
|
@ -1,7 +1,8 @@
|
|||
import path from 'path';
|
||||
import pg from 'pg';
|
||||
import { migrate } from '@runtipi/postgres-migrations';
|
||||
import { fileLogger } from '../logger/file-logger';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { ROOT_FOLDER } from '@/config/constants';
|
||||
|
||||
type MigrationParams = {
|
||||
postgresHost: string;
|
||||
|
@ -12,13 +13,13 @@ type MigrationParams = {
|
|||
};
|
||||
|
||||
export const runPostgresMigrations = async (params: MigrationParams) => {
|
||||
const assetsFolder = path.join('/snapshot', 'runtipi', 'packages', 'cli', 'assets');
|
||||
const assetsFolder = path.join(ROOT_FOLDER, 'assets');
|
||||
|
||||
const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = params;
|
||||
|
||||
fileLogger.info('Starting database migration');
|
||||
logger.info('Starting database migration');
|
||||
|
||||
fileLogger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);
|
||||
logger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);
|
||||
|
||||
const client = new pg.Client({
|
||||
user: postgresUsername,
|
||||
|
@ -29,28 +30,28 @@ export const runPostgresMigrations = async (params: MigrationParams) => {
|
|||
});
|
||||
await client.connect();
|
||||
|
||||
fileLogger.info('Client connected');
|
||||
logger.info('Client connected');
|
||||
|
||||
try {
|
||||
const { rows } = await client.query('SELECT * FROM migrations');
|
||||
// if rows contains a migration with name 'Initial1657299198975' (legacy typeorm) delete table migrations. As all migrations are idempotent we can safely delete the table and start over.
|
||||
if (rows.find((row) => row.name === 'Initial1657299198975')) {
|
||||
fileLogger.info('Found legacy migration. Deleting table migrations');
|
||||
logger.info('Found legacy migration. Deleting table migrations');
|
||||
await client.query('DROP TABLE migrations');
|
||||
}
|
||||
} catch (e) {
|
||||
fileLogger.info('Migrations table not found, creating it');
|
||||
logger.info('Migrations table not found, creating it');
|
||||
}
|
||||
|
||||
fileLogger.info('Running migrations');
|
||||
logger.info('Running migrations');
|
||||
try {
|
||||
await migrate({ client }, path.join(assetsFolder, 'migrations'), { skipCreateMigrationTable: true });
|
||||
} catch (e) {
|
||||
fileLogger.error('Error running migrations. Dropping table migrations and trying again');
|
||||
logger.error('Error running migrations. Dropping table migrations and trying again');
|
||||
await client.query('DROP TABLE migrations');
|
||||
await migrate({ client }, path.join(assetsFolder, 'migrations'), { skipCreateMigrationTable: true });
|
||||
}
|
||||
|
||||
fileLogger.info('Migration complete');
|
||||
logger.info('Migration complete');
|
||||
await client.end();
|
||||
};
|
1
packages/worker/src/lib/system/index.ts
Normal file
1
packages/worker/src/lib/system/index.ts
Normal file
|
@ -0,0 +1 @@
|
|||
export { setEnvVariable, copySystemFiles, generateSystemEnvFile, ensureFilePermissions, generateTlsCertificates } from './system.helpers';
|
287
packages/worker/src/lib/system/system.helpers.ts
Normal file
287
packages/worker/src/lib/system/system.helpers.ts
Normal file
|
@ -0,0 +1,287 @@
|
|||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { envMapToString, envStringToMap, execAsync, pathExists, settingsSchema } from '@runtipi/shared';
|
||||
import { logger } from '../logger/logger';
|
||||
import { getRepoHash } from '../../services/repo/repo.helpers';
|
||||
import { ROOT_FOLDER } from '@/config/constants';
|
||||
|
||||
type EnvKeys =
|
||||
| 'APPS_REPO_ID'
|
||||
| 'APPS_REPO_URL'
|
||||
| 'TZ'
|
||||
| 'INTERNAL_IP'
|
||||
| 'DNS_IP'
|
||||
| 'ARCHITECTURE'
|
||||
| 'TIPI_VERSION'
|
||||
| 'JWT_SECRET'
|
||||
| 'ROOT_FOLDER_HOST'
|
||||
| 'NGINX_PORT'
|
||||
| 'NGINX_PORT_SSL'
|
||||
| 'DOMAIN'
|
||||
| 'STORAGE_PATH'
|
||||
| 'POSTGRES_PORT'
|
||||
| 'POSTGRES_HOST'
|
||||
| 'POSTGRES_DBNAME'
|
||||
| 'POSTGRES_PASSWORD'
|
||||
| 'POSTGRES_USERNAME'
|
||||
| 'REDIS_HOST'
|
||||
| 'REDIS_PASSWORD'
|
||||
| 'LOCAL_DOMAIN'
|
||||
| 'DEMO_MODE'
|
||||
| 'GUEST_DASHBOARD'
|
||||
| 'TIPI_GID'
|
||||
| 'TIPI_UID'
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
| (string & {});
|
||||
|
||||
const OLD_DEFAULT_REPO_URL = 'https://github.com/meienberger/runtipi-appstore';
|
||||
const DEFAULT_REPO_URL = 'https://github.com/runtipi/runtipi-appstore';
|
||||
|
||||
/**
|
||||
* Reads and returns the generated seed
|
||||
*/
|
||||
const getSeed = async () => {
|
||||
const seedFilePath = path.join(ROOT_FOLDER, 'state', 'seed');
|
||||
|
||||
if (!(await pathExists(seedFilePath))) {
|
||||
throw new Error('Seed file not found');
|
||||
}
|
||||
|
||||
const seed = await fs.promises.readFile(seedFilePath, 'utf-8');
|
||||
|
||||
return seed;
|
||||
};
|
||||
|
||||
/**
|
||||
* Derives a new entropy value from the provided entropy and the seed
|
||||
* @param {string} entropy - The entropy value to derive from
|
||||
*/
|
||||
const deriveEntropy = async (entropy: string) => {
|
||||
const seed = await getSeed();
|
||||
const hmac = crypto.createHmac('sha256', seed);
|
||||
hmac.update(entropy);
|
||||
|
||||
return hmac.digest('hex');
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates a random seed if it does not exist yet
|
||||
*/
|
||||
const generateSeed = async () => {
|
||||
if (!(await pathExists(path.join(ROOT_FOLDER, 'state', 'seed')))) {
|
||||
const randomBytes = crypto.randomBytes(32);
|
||||
const seed = randomBytes.toString('hex');
|
||||
|
||||
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'state', 'seed'), seed);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the architecture of the current system
|
||||
*/
|
||||
const getArchitecture = () => {
|
||||
const arch = os.arch();
|
||||
|
||||
if (arch === 'arm64') return 'arm64';
|
||||
if (arch === 'x64') return 'amd64';
|
||||
|
||||
throw new Error(`Unsupported architecture: ${arch}`);
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates a valid .env file from the settings.json file
|
||||
*/
|
||||
export const generateSystemEnvFile = async () => {
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'state'), { recursive: true });
|
||||
const settingsFilePath = path.join(ROOT_FOLDER, 'state', 'settings.json');
|
||||
const envFilePath = path.join(ROOT_FOLDER, '.env');
|
||||
|
||||
if (!(await pathExists(envFilePath))) {
|
||||
await fs.promises.writeFile(envFilePath, '');
|
||||
}
|
||||
|
||||
const envFile = await fs.promises.readFile(envFilePath, 'utf-8');
|
||||
|
||||
const envMap: Map<EnvKeys, string> = envStringToMap(envFile);
|
||||
|
||||
if (!(await pathExists(settingsFilePath))) {
|
||||
await fs.promises.writeFile(settingsFilePath, JSON.stringify({}));
|
||||
}
|
||||
|
||||
const settingsFile = await fs.promises.readFile(settingsFilePath, 'utf-8');
|
||||
|
||||
const settings = settingsSchema.safeParse(JSON.parse(settingsFile));
|
||||
|
||||
if (!settings.success) {
|
||||
throw new Error(`Invalid settings.json file: ${settings.error.message}`);
|
||||
}
|
||||
|
||||
await generateSeed();
|
||||
|
||||
const { data } = settings;
|
||||
|
||||
if (data.appsRepoUrl === OLD_DEFAULT_REPO_URL) {
|
||||
data.appsRepoUrl = DEFAULT_REPO_URL;
|
||||
}
|
||||
|
||||
const jwtSecret = envMap.get('JWT_SECRET') || (await deriveEntropy('jwt_secret'));
|
||||
const repoId = getRepoHash(data.appsRepoUrl || DEFAULT_REPO_URL);
|
||||
|
||||
const rootFolderHost = envMap.get('ROOT_FOLDER_HOST');
|
||||
const internalIp = envMap.get('INTERNAL_IP');
|
||||
|
||||
if (!rootFolderHost) {
|
||||
throw new Error('ROOT_FOLDER_HOST not set in .env file');
|
||||
}
|
||||
|
||||
if (!internalIp) {
|
||||
throw new Error('INTERNAL_IP not set in .env file');
|
||||
}
|
||||
|
||||
envMap.set('APPS_REPO_ID', repoId);
|
||||
envMap.set('APPS_REPO_URL', data.appsRepoUrl || DEFAULT_REPO_URL);
|
||||
envMap.set('TZ', Intl.DateTimeFormat().resolvedOptions().timeZone);
|
||||
envMap.set('INTERNAL_IP', data.listenIp || internalIp);
|
||||
envMap.set('DNS_IP', data.dnsIp || '9.9.9.9');
|
||||
envMap.set('ARCHITECTURE', getArchitecture());
|
||||
envMap.set('JWT_SECRET', jwtSecret);
|
||||
envMap.set('DOMAIN', data.domain || 'example.com');
|
||||
envMap.set('STORAGE_PATH', data.storagePath || envMap.get('STORAGE_PATH') || rootFolderHost);
|
||||
envMap.set('POSTGRES_HOST', 'tipi-db');
|
||||
envMap.set('POSTGRES_DBNAME', 'tipi');
|
||||
envMap.set('POSTGRES_USERNAME', 'tipi');
|
||||
envMap.set('POSTGRES_PORT', String(5432));
|
||||
envMap.set('REDIS_HOST', 'tipi-redis');
|
||||
envMap.set('DEMO_MODE', String(data.demoMode || 'false'));
|
||||
envMap.set('GUEST_DASHBOARD', String(data.guestDashboard || 'false'));
|
||||
envMap.set('LOCAL_DOMAIN', data.localDomain || 'tipi.lan');
|
||||
envMap.set('NODE_ENV', 'production');
|
||||
|
||||
await fs.promises.writeFile(envFilePath, envMapToString(envMap));
|
||||
|
||||
return envMap;
|
||||
};
|
||||
|
||||
/**
|
||||
* Copies the system files from the assets folder to the current working directory
|
||||
*/
|
||||
export const copySystemFiles = async () => {
|
||||
// Remove old unused files
|
||||
if (await pathExists(path.join(ROOT_FOLDER, 'scripts'))) {
|
||||
logger.info('Removing old scripts folder');
|
||||
await fs.promises.rmdir(path.join(ROOT_FOLDER, 'scripts'), { recursive: true });
|
||||
}
|
||||
|
||||
const assetsFolder = path.join(ROOT_FOLDER, 'assets');
|
||||
|
||||
// Copy traefik folder from assets
|
||||
logger.info('Creating traefik folders');
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'traefik', 'dynamic'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'traefik', 'shared'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'traefik', 'tls'), { recursive: true });
|
||||
|
||||
logger.info('Copying traefik files');
|
||||
await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'traefik.yml'), path.join(ROOT_FOLDER, 'traefik', 'traefik.yml'));
|
||||
await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'dynamic', 'dynamic.yml'), path.join(ROOT_FOLDER, 'traefik', 'dynamic', 'dynamic.yml'));
|
||||
|
||||
// Create base folders
|
||||
logger.info('Creating base folders');
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'apps'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'app-data'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'state'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'repos'), { recursive: true });
|
||||
|
||||
// Create media folders
|
||||
logger.info('Creating media folders');
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'torrents', 'watch'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'torrents', 'complete'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'torrents', 'incomplete'), { recursive: true });
|
||||
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'usenet', 'watch'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'usenet', 'complete'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'usenet', 'incomplete'), { recursive: true });
|
||||
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'downloads', 'watch'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'downloads', 'complete'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'downloads', 'incomplete'), { recursive: true });
|
||||
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'books'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'comics'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'movies'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'music'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'tv'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'podcasts'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'images'), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'roms'), { recursive: true });
|
||||
};
|
||||
|
||||
/**
|
||||
* Given a domain, generates the TLS certificates for it to be used with Traefik
|
||||
*
|
||||
* @param {string} data.domain The domain to generate the certificates for
|
||||
*/
|
||||
export const generateTlsCertificates = async (data: { domain?: string }) => {
|
||||
if (!data.domain) {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the certificate already exists, don't generate it again
|
||||
if (await pathExists(path.join(ROOT_FOLDER, 'traefik', 'tls', `${data.domain}.txt`))) {
|
||||
logger.info(`TLS certificate for ${data.domain} already exists`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Remove old certificates
|
||||
if (await pathExists(path.join(ROOT_FOLDER, 'traefik', 'tls', 'cert.pem'))) {
|
||||
logger.info('Removing old TLS certificate');
|
||||
await fs.promises.unlink(path.join(ROOT_FOLDER, 'traefik', 'tls', 'cert.pem'));
|
||||
}
|
||||
if (await pathExists(path.join(ROOT_FOLDER, 'traefik', 'tls', 'key.pem'))) {
|
||||
logger.info('Removing old TLS key');
|
||||
await fs.promises.unlink(path.join(ROOT_FOLDER, 'traefik', 'tls', 'key.pem'));
|
||||
}
|
||||
|
||||
const subject = `/O=runtipi.io/OU=IT/CN=*.${data.domain}/emailAddress=webmaster@${data.domain}`;
|
||||
const subjectAltName = `DNS:*.${data.domain},DNS:${data.domain}`;
|
||||
|
||||
try {
|
||||
logger.info(`Generating TLS certificate for ${data.domain}`);
|
||||
await execAsync(`openssl req -x509 -newkey rsa:4096 -keyout traefik/tls/key.pem -out traefik/tls/cert.pem -days 365 -subj "${subject}" -addext "subjectAltName = ${subjectAltName}" -nodes`);
|
||||
logger.info(`Writing txt file for ${data.domain}`);
|
||||
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'traefik', 'tls', `${data.domain}.txt`), '');
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
export const ensureFilePermissions = async () => {
|
||||
const filesAndFolders = [
|
||||
path.join(ROOT_FOLDER, 'apps'),
|
||||
path.join(ROOT_FOLDER, 'logs'),
|
||||
path.join(ROOT_FOLDER, 'repos'),
|
||||
path.join(ROOT_FOLDER, 'state'),
|
||||
path.join(ROOT_FOLDER, 'traefik'),
|
||||
path.join(ROOT_FOLDER, '.env'),
|
||||
path.join(ROOT_FOLDER, 'VERSION'),
|
||||
path.join(ROOT_FOLDER, 'docker-compose.yml'),
|
||||
];
|
||||
|
||||
const files600 = [path.join(ROOT_FOLDER, 'traefik', 'shared', 'acme.json')];
|
||||
|
||||
// Give permission to read and write to all files and folders for the current user
|
||||
for (const fileOrFolder of filesAndFolders) {
|
||||
if (await pathExists(fileOrFolder)) {
|
||||
await execAsync(`chmod -R a+rwx ${fileOrFolder}`).catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
for (const fileOrFolder of files600) {
|
||||
if (await pathExists(fileOrFolder)) {
|
||||
await execAsync(`chmod 600 ${fileOrFolder}`).catch(() => {});
|
||||
}
|
||||
}
|
||||
};
|
286
packages/worker/src/services/app/app.executors.ts
Normal file
286
packages/worker/src/services/app/app.executors.ts
Normal file
|
@ -0,0 +1,286 @@
|
|||
/* eslint-disable no-await-in-loop */
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import pg from 'pg';
|
||||
import { execAsync, pathExists } from '@runtipi/shared';
|
||||
import { copyDataDir, generateEnvFile } from './app.helpers';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { compose } from '@/lib/docker';
|
||||
import { getEnv } from '@/lib/environment';
|
||||
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||
|
||||
const getDbClient = async () => {
|
||||
const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getEnv();
|
||||
|
||||
const client = new pg.Client({
|
||||
host: postgresHost,
|
||||
database: postgresDatabase,
|
||||
user: postgresUsername,
|
||||
password: postgresPassword,
|
||||
port: Number(postgresPort),
|
||||
});
|
||||
|
||||
await client.connect();
|
||||
|
||||
return client;
|
||||
};
|
||||
|
||||
export class AppExecutors {
|
||||
private readonly logger;
|
||||
|
||||
constructor() {
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
private handleAppError = (err: unknown) => {
|
||||
if (err instanceof Error) {
|
||||
this.logger.error(`An error occurred: ${err.message}`);
|
||||
return { success: false, message: err.message };
|
||||
}
|
||||
|
||||
return { success: false, message: `An error occurred: ${err}` };
|
||||
};
|
||||
|
||||
private getAppPaths = (appId: string) => {
|
||||
const { appsRepoId } = getEnv();
|
||||
|
||||
const appDataDirPath = path.join(STORAGE_FOLDER, 'app-data', appId);
|
||||
const appDirPath = path.join(ROOT_FOLDER, 'apps', appId);
|
||||
const configJsonPath = path.join(appDirPath, 'config.json');
|
||||
const repoPath = path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', appId);
|
||||
|
||||
return { appDataDirPath, appDirPath, configJsonPath, repoPath };
|
||||
};
|
||||
|
||||
/**
|
||||
* Given an app id, ensures that the app folder exists in the apps folder
|
||||
* If not, copies the app folder from the repo
|
||||
* @param {string} appId - App id
|
||||
*/
|
||||
private ensureAppDir = async (appId: string) => {
|
||||
const { appDirPath, repoPath } = this.getAppPaths(appId);
|
||||
const dockerFilePath = path.join(ROOT_FOLDER, 'apps', appId, 'docker-compose.yml');
|
||||
|
||||
if (!(await pathExists(dockerFilePath))) {
|
||||
// delete eventual app folder if exists
|
||||
this.logger.info(`Deleting app ${appId} folder if exists`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||
|
||||
// Copy app folder from repo
|
||||
this.logger.info(`Copying app ${appId} from repo ${getEnv().appsRepoId}`);
|
||||
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Install an app from the repo
|
||||
* @param {string} appId - The id of the app to install
|
||||
* @param {Record<string, unknown>} config - The config of the app
|
||||
*/
|
||||
public installApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
if (process.getuid && process.getgid) {
|
||||
this.logger.info(`Installing app ${appId} as User ID: ${process.getuid()}, Group ID: ${process.getgid()}`);
|
||||
} else {
|
||||
this.logger.info(`Installing app ${appId}. No User ID or Group ID found.`);
|
||||
}
|
||||
|
||||
const { appsRepoId } = getEnv();
|
||||
|
||||
const { appDirPath, repoPath, appDataDirPath } = this.getAppPaths(appId);
|
||||
|
||||
// Check if app exists in repo
|
||||
const apps = await fs.promises.readdir(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps'));
|
||||
|
||||
if (!apps.includes(appId)) {
|
||||
this.logger.error(`App ${appId} not found in repo ${appsRepoId}`);
|
||||
return { success: false, message: `App ${appId} not found in repo ${appsRepoId}` };
|
||||
}
|
||||
|
||||
// Delete app folder if exists
|
||||
this.logger.info(`Deleting folder ${appDirPath} if exists`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||
|
||||
// Create app folder
|
||||
this.logger.info(`Creating folder ${appDirPath}`);
|
||||
await fs.promises.mkdir(appDirPath, { recursive: true });
|
||||
|
||||
// Copy app folder from repo
|
||||
this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
|
||||
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||
|
||||
// Create folder app-data folder
|
||||
this.logger.info(`Creating folder ${appDataDirPath}`);
|
||||
await fs.promises.mkdir(appDataDirPath, { recursive: true });
|
||||
|
||||
// Create app.env file
|
||||
this.logger.info(`Creating app.env file for app ${appId}`);
|
||||
await generateEnvFile(appId, config);
|
||||
|
||||
// Copy data dir
|
||||
this.logger.info(`Copying data dir for app ${appId}`);
|
||||
if (!(await pathExists(`${appDataDirPath}/data`))) {
|
||||
await copyDataDir(appId);
|
||||
}
|
||||
|
||||
await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
|
||||
this.logger.error(`Error setting permissions for app ${appId}`);
|
||||
});
|
||||
|
||||
// run docker-compose up
|
||||
this.logger.info(`Running docker-compose up for app ${appId}`);
|
||||
await compose(appId, 'up -d');
|
||||
|
||||
this.logger.info(`Docker-compose up for app ${appId} finished`);
|
||||
|
||||
return { success: true, message: `App ${appId} installed successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Stops an app
|
||||
* @param {string} appId - The id of the app to stop
|
||||
* @param {Record<string, unknown>} config - The config of the app
|
||||
*/
|
||||
public stopApp = async (appId: string, config: Record<string, unknown>, skipEnvGeneration = false) => {
|
||||
try {
|
||||
this.logger.info(`Stopping app ${appId}`);
|
||||
|
||||
await this.ensureAppDir(appId);
|
||||
|
||||
if (!skipEnvGeneration) {
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await generateEnvFile(appId, config);
|
||||
}
|
||||
await compose(appId, 'rm --force --stop');
|
||||
|
||||
this.logger.info(`App ${appId} stopped`);
|
||||
return { success: true, message: `App ${appId} stopped successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public startApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
const { appDataDirPath } = this.getAppPaths(appId);
|
||||
|
||||
this.logger.info(`Starting app ${appId}`);
|
||||
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
|
||||
await compose(appId, 'up --detach --force-recreate --remove-orphans --pull always');
|
||||
|
||||
this.logger.info(`App ${appId} started`);
|
||||
|
||||
this.logger.info(`Setting permissions for app ${appId}`);
|
||||
await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
|
||||
this.logger.error(`Error setting permissions for app ${appId}`);
|
||||
});
|
||||
|
||||
return { success: true, message: `App ${appId} started successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public uninstallApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
const { appDirPath, appDataDirPath } = this.getAppPaths(appId);
|
||||
this.logger.info(`Uninstalling app ${appId}`);
|
||||
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
await compose(appId, 'down --remove-orphans --volumes --rmi all');
|
||||
|
||||
this.logger.info(`Deleting folder ${appDirPath}`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true }).catch((err) => {
|
||||
this.logger.error(`Error deleting folder ${appDirPath}: ${err.message}`);
|
||||
});
|
||||
|
||||
this.logger.info(`Deleting folder ${appDataDirPath}`);
|
||||
await fs.promises.rm(appDataDirPath, { recursive: true, force: true }).catch((err) => {
|
||||
this.logger.error(`Error deleting folder ${appDataDirPath}: ${err.message}`);
|
||||
});
|
||||
|
||||
this.logger.info(`App ${appId} uninstalled`);
|
||||
return { success: true, message: `App ${appId} uninstalled successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public updateApp = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
const { appDirPath, repoPath } = this.getAppPaths(appId);
|
||||
this.logger.info(`Updating app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
|
||||
await compose(appId, 'up --detach --force-recreate --remove-orphans');
|
||||
await compose(appId, 'down --rmi all --remove-orphans');
|
||||
|
||||
this.logger.info(`Deleting folder ${appDirPath}`);
|
||||
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||
|
||||
this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
|
||||
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||
|
||||
await compose(appId, 'pull');
|
||||
|
||||
return { success: true, message: `App ${appId} updated successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
public regenerateAppEnv = async (appId: string, config: Record<string, unknown>) => {
|
||||
try {
|
||||
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||
await this.ensureAppDir(appId);
|
||||
await generateEnvFile(appId, config);
|
||||
return { success: true, message: `App ${appId} env file regenerated successfully` };
|
||||
} catch (err) {
|
||||
return this.handleAppError(err);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Start all apps with status running
|
||||
*/
|
||||
public startAllApps = async () => {
|
||||
const client = await getDbClient();
|
||||
|
||||
try {
|
||||
// Get all apps with status running
|
||||
const { rows } = await client.query(`SELECT * FROM app WHERE status = 'running'`);
|
||||
|
||||
// Update all apps with status different than running or stopped to stopped
|
||||
await client.query(`UPDATE app SET status = 'stopped' WHERE status != 'stopped' AND status != 'running' AND status != 'missing'`);
|
||||
|
||||
// Start all apps
|
||||
for (const row of rows) {
|
||||
const { id, config } = row;
|
||||
|
||||
const { success } = await this.startApp(id, config);
|
||||
|
||||
if (!success) {
|
||||
this.logger.error(`Error starting app ${id}`);
|
||||
await client.query(`UPDATE app SET status = 'stopped' WHERE id = '${id}'`);
|
||||
} else {
|
||||
await client.query(`UPDATE app SET status = 'running' WHERE id = '${id}'`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(`Error starting apps: ${err}`);
|
||||
} finally {
|
||||
await client.end();
|
||||
}
|
||||
};
|
||||
}
|
|
@ -1,11 +1,10 @@
|
|||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { appInfoSchema, envMapToString, envStringToMap } from '@runtipi/shared';
|
||||
import { getEnv } from '@/utils/environment/environment';
|
||||
import { appInfoSchema, envMapToString, envStringToMap, execAsync, pathExists } from '@runtipi/shared';
|
||||
import { generateVapidKeys, getAppEnvMap } from './env.helpers';
|
||||
import { pathExists } from '@/utils/fs-helpers';
|
||||
import { execAsync } from '@/utils/exec-async/execAsync';
|
||||
import { getEnv } from '@/lib/environment';
|
||||
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||
|
||||
/**
|
||||
* This function generates a random string of the provided length by using the SHA-256 hash algorithm.
|
||||
|
@ -17,7 +16,7 @@ import { execAsync } from '@/utils/exec-async/execAsync';
|
|||
*/
|
||||
const getEntropy = async (name: string, length: number) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
const seed = await fs.promises.readFile(path.join(getEnv().rootFolderHost, 'state', 'seed'));
|
||||
const seed = await fs.promises.readFile(path.join(ROOT_FOLDER, 'state', 'seed'));
|
||||
|
||||
hash.update(name + seed.toString());
|
||||
return hash.digest('hex').substring(0, length);
|
||||
|
@ -36,16 +35,16 @@ const getEntropy = async (name: string, length: number) => {
|
|||
* @throws Will throw an error if the app has an invalid config.json file or if a required variable is missing.
|
||||
*/
|
||||
export const generateEnvFile = async (appId: string, config: Record<string, unknown>) => {
|
||||
const { rootFolderHost, storagePath, internalIp } = getEnv();
|
||||
const { internalIp, storagePath, rootFolderHost } = getEnv();
|
||||
|
||||
const configFile = await fs.promises.readFile(path.join(rootFolderHost, 'apps', appId, 'config.json'));
|
||||
const configFile = await fs.promises.readFile(path.join(ROOT_FOLDER, 'apps', appId, 'config.json'));
|
||||
const parsedConfig = appInfoSchema.safeParse(JSON.parse(configFile.toString()));
|
||||
|
||||
if (!parsedConfig.success) {
|
||||
throw new Error(`App ${appId} has invalid config.json file`);
|
||||
}
|
||||
|
||||
const baseEnvFile = await fs.promises.readFile(path.join(rootFolderHost, '.env'));
|
||||
const baseEnvFile = await fs.promises.readFile(path.join(ROOT_FOLDER, '.env'));
|
||||
const envMap = envStringToMap(baseEnvFile.toString());
|
||||
|
||||
// Default always present env variables
|
||||
|
@ -101,12 +100,12 @@ export const generateEnvFile = async (appId: string, config: Record<string, unkn
|
|||
}
|
||||
|
||||
// Create app-data folder if it doesn't exist
|
||||
const appDataDirectoryExists = await fs.promises.stat(path.join(storagePath, 'app-data', appId)).catch(() => false);
|
||||
const appDataDirectoryExists = await fs.promises.stat(path.join(STORAGE_FOLDER, 'app-data', appId)).catch(() => false);
|
||||
if (!appDataDirectoryExists) {
|
||||
await fs.promises.mkdir(path.join(storagePath, 'app-data', appId), { recursive: true });
|
||||
await fs.promises.mkdir(path.join(STORAGE_FOLDER, 'app-data', appId), { recursive: true });
|
||||
}
|
||||
|
||||
await fs.promises.writeFile(path.join(storagePath, 'app-data', appId, 'app.env'), envMapToString(envMap));
|
||||
await fs.promises.writeFile(path.join(STORAGE_FOLDER, 'app-data', appId, 'app.env'), envMapToString(envMap));
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -133,40 +132,38 @@ const renderTemplate = (template: string, envMap: Map<string, string>) => {
|
|||
* @param {string} id - The id of the app.
|
||||
*/
|
||||
export const copyDataDir = async (id: string) => {
|
||||
const { rootFolderHost, storagePath } = getEnv();
|
||||
|
||||
const envMap = await getAppEnvMap(id);
|
||||
|
||||
// return if app does not have a data directory
|
||||
if (!(await pathExists(`${rootFolderHost}/apps/${id}/data`))) {
|
||||
if (!(await pathExists(`${ROOT_FOLDER}/apps/${id}/data`))) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Create app-data folder if it doesn't exist
|
||||
if (!(await pathExists(`${storagePath}/app-data/${id}/data`))) {
|
||||
await fs.promises.mkdir(`${storagePath}/app-data/${id}/data`, { recursive: true });
|
||||
if (!(await pathExists(`${STORAGE_FOLDER}/app-data/${id}/data`))) {
|
||||
await fs.promises.mkdir(`${STORAGE_FOLDER}/app-data/${id}/data`, { recursive: true });
|
||||
}
|
||||
|
||||
const dataDir = await fs.promises.readdir(`${rootFolderHost}/apps/${id}/data`);
|
||||
const dataDir = await fs.promises.readdir(`${ROOT_FOLDER}/apps/${id}/data`);
|
||||
|
||||
const processFile = async (file: string) => {
|
||||
if (file.endsWith('.template')) {
|
||||
const template = await fs.promises.readFile(`${rootFolderHost}/apps/${id}/data/${file}`, 'utf-8');
|
||||
const template = await fs.promises.readFile(`${ROOT_FOLDER}/apps/${id}/data/${file}`, 'utf-8');
|
||||
const renderedTemplate = renderTemplate(template, envMap);
|
||||
|
||||
await fs.promises.writeFile(`${storagePath}/app-data/${id}/data/${file.replace('.template', '')}`, renderedTemplate);
|
||||
await fs.promises.writeFile(`${STORAGE_FOLDER}/app-data/${id}/data/${file.replace('.template', '')}`, renderedTemplate);
|
||||
} else {
|
||||
await fs.promises.copyFile(`${rootFolderHost}/apps/${id}/data/${file}`, `${storagePath}/app-data/${id}/data/${file}`);
|
||||
await fs.promises.copyFile(`${ROOT_FOLDER}/apps/${id}/data/${file}`, `${STORAGE_FOLDER}/app-data/${id}/data/${file}`);
|
||||
}
|
||||
};
|
||||
|
||||
const processDir = async (p: string) => {
|
||||
await fs.promises.mkdir(`${storagePath}/app-data/${id}/data/${p}`, { recursive: true });
|
||||
const files = await fs.promises.readdir(`${rootFolderHost}/apps/${id}/data/${p}`);
|
||||
await fs.promises.mkdir(`${STORAGE_FOLDER}/app-data/${id}/data/${p}`, { recursive: true });
|
||||
const files = await fs.promises.readdir(`${ROOT_FOLDER}/apps/${id}/data/${p}`);
|
||||
|
||||
await Promise.all(
|
||||
files.map(async (file) => {
|
||||
const fullPath = `${rootFolderHost}/apps/${id}/data/${p}/${file}`;
|
||||
const fullPath = `${ROOT_FOLDER}/apps/${id}/data/${p}/${file}`;
|
||||
|
||||
if ((await fs.promises.lstat(fullPath)).isDirectory()) {
|
||||
await processDir(`${p}/${file}`);
|
||||
|
@ -179,7 +176,7 @@ export const copyDataDir = async (id: string) => {
|
|||
|
||||
await Promise.all(
|
||||
dataDir.map(async (file) => {
|
||||
const fullPath = `${rootFolderHost}/apps/${id}/data/${file}`;
|
||||
const fullPath = `${ROOT_FOLDER}/apps/${id}/data/${file}`;
|
||||
|
||||
if ((await fs.promises.lstat(fullPath)).isDirectory()) {
|
||||
await processDir(file);
|
||||
|
@ -190,7 +187,7 @@ export const copyDataDir = async (id: string) => {
|
|||
);
|
||||
|
||||
// Remove any .gitkeep files from the app-data folder at any level
|
||||
if (await pathExists(`${storagePath}/app-data/${id}/data`)) {
|
||||
await execAsync(`find ${storagePath}/app-data/${id}/data -name .gitkeep -delete`).catch(() => {});
|
||||
if (await pathExists(`${STORAGE_FOLDER}/app-data/${id}/data`)) {
|
||||
await execAsync(`find ${STORAGE_FOLDER}/app-data/${id}/data -name .gitkeep -delete`).catch(() => {});
|
||||
}
|
||||
};
|
|
@ -1,7 +1,7 @@
|
|||
import webpush from 'web-push';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { getEnv } from '@/utils/environment/environment';
|
||||
import { STORAGE_FOLDER } from '@/config/constants';
|
||||
|
||||
/**
|
||||
* This function reads the env file for the app with the provided id and returns a Map containing the key-value pairs of the environment variables.
|
||||
|
@ -11,7 +11,7 @@ import { getEnv } from '@/utils/environment/environment';
|
|||
*/
|
||||
export const getAppEnvMap = async (appId: string) => {
|
||||
try {
|
||||
const envFile = await fs.promises.readFile(path.join(getEnv().storagePath, 'app-data', appId, 'app.env'));
|
||||
const envFile = await fs.promises.readFile(path.join(STORAGE_FOLDER, 'app-data', appId, 'app.env'));
|
||||
const envVars = envFile.toString().split('\n');
|
||||
const envVarsMap = new Map<string, string>();
|
||||
|
3
packages/worker/src/services/index.ts
Normal file
3
packages/worker/src/services/index.ts
Normal file
|
@ -0,0 +1,3 @@
|
|||
export { AppExecutors } from './app/app.executors';
|
||||
export { RepoExecutors } from './repo/repo.executors';
|
||||
export { SystemExecutors } from './system/system.executors';
|
|
@ -1,15 +1,13 @@
|
|||
import { getEnv } from 'src/utils/environment/environment';
|
||||
import path from 'path';
|
||||
import { pathExists } from '@/utils/fs-helpers';
|
||||
import { execAsync, pathExists } from '@runtipi/shared';
|
||||
import { getRepoHash } from './repo.helpers';
|
||||
import { fileLogger } from '@/utils/logger/file-logger';
|
||||
import { execAsync } from '@/utils/exec-async/execAsync';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
export class RepoExecutors {
|
||||
private readonly logger;
|
||||
|
||||
constructor() {
|
||||
this.logger = fileLogger;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -32,10 +30,8 @@ export class RepoExecutors {
|
|||
*/
|
||||
public cloneRepo = async (repoUrl: string) => {
|
||||
try {
|
||||
const { rootFolderHost } = getEnv();
|
||||
|
||||
const repoHash = getRepoHash(repoUrl);
|
||||
const repoPath = path.join(rootFolderHost, 'repos', repoHash);
|
||||
const repoPath = path.join('/app', 'repos', repoHash);
|
||||
|
||||
if (await pathExists(repoPath)) {
|
||||
this.logger.info(`Repo ${repoUrl} already exists`);
|
||||
|
@ -60,10 +56,8 @@ export class RepoExecutors {
|
|||
*/
|
||||
public pullRepo = async (repoUrl: string) => {
|
||||
try {
|
||||
const { rootFolderHost } = getEnv();
|
||||
|
||||
const repoHash = getRepoHash(repoUrl);
|
||||
const repoPath = path.join(rootFolderHost, 'repos', repoHash);
|
||||
const repoPath = path.join('/app', 'repos', repoHash);
|
||||
|
||||
if (!(await pathExists(repoPath))) {
|
||||
this.logger.info(`Repo ${repoUrl} does not exist`);
|
61
packages/worker/src/services/system/system.executors.ts
Normal file
61
packages/worker/src/services/system/system.executors.ts
Normal file
|
@ -0,0 +1,61 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import si from 'systeminformation';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { ROOT_FOLDER } from '@/config/constants';
|
||||
|
||||
export class SystemExecutors {
|
||||
private readonly logger;
|
||||
|
||||
constructor() {
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
private handleSystemError = (err: unknown) => {
|
||||
if (err instanceof Error) {
|
||||
this.logger.error(`An error occurred: ${err.message}`);
|
||||
return { success: false, message: err.message };
|
||||
}
|
||||
this.logger.error(`An error occurred: ${err}`);
|
||||
|
||||
return { success: false, message: `An error occurred: ${err}` };
|
||||
};
|
||||
|
||||
private getSystemLoad = async () => {
|
||||
const { currentLoad } = await si.currentLoad();
|
||||
|
||||
const memResult = { total: 0, used: 0, available: 0 };
|
||||
|
||||
try {
|
||||
const memInfo = await fs.promises.readFile('/host/proc/meminfo');
|
||||
|
||||
memResult.total = Number(memInfo.toString().match(/MemTotal:\s+(\d+)/)?.[1] ?? 0) * 1024;
|
||||
memResult.available = Number(memInfo.toString().match(/MemAvailable:\s+(\d+)/)?.[1] ?? 0) * 1024;
|
||||
memResult.used = memResult.total - memResult.available;
|
||||
} catch (e) {
|
||||
this.logger.error(`Unable to read /host/proc/meminfo: ${e}`);
|
||||
}
|
||||
|
||||
const disks = await si.fsSize();
|
||||
const disk0 = disks.find((disk) => disk.mount.startsWith('/mnt/host') && disk.type === 'fakeowner');
|
||||
|
||||
return {
|
||||
cpu: { load: currentLoad },
|
||||
memory: memResult,
|
||||
disk: { total: disk0?.size, used: disk0?.used, available: disk0?.available },
|
||||
};
|
||||
};
|
||||
|
||||
public systemInfo = async () => {
|
||||
try {
|
||||
const systemLoad = await this.getSystemLoad();
|
||||
|
||||
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'state', 'system-info.json'), JSON.stringify(systemLoad, null, 2));
|
||||
await fs.promises.chmod(path.join(ROOT_FOLDER, 'state', 'system-info.json'), 0o777);
|
||||
|
||||
return { success: true, message: '' };
|
||||
} catch (e) {
|
||||
return this.handleSystemError(e);
|
||||
}
|
||||
};
|
||||
}
|
|
@ -1,18 +1,13 @@
|
|||
import { eventSchema } from '@runtipi/shared';
|
||||
import { Worker } from 'bullmq';
|
||||
import { AppExecutors, RepoExecutors, SystemExecutors } from '@/executors';
|
||||
import { getEnv } from '@/utils/environment/environment';
|
||||
import { getUserIds } from '@/utils/environment/user';
|
||||
import { fileLogger } from '@/utils/logger/file-logger';
|
||||
import { execAsync } from '@/utils/exec-async/execAsync';
|
||||
import { AppExecutors, RepoExecutors, SystemExecutors } from '@/services';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getEnv } from '@/lib/environment';
|
||||
|
||||
const runCommand = async (jobData: unknown) => {
|
||||
const { gid, uid } = getUserIds();
|
||||
fileLogger.info(`Running command with uid ${uid} and gid ${gid}`);
|
||||
|
||||
const { installApp, startApp, stopApp, uninstallApp, updateApp, regenerateAppEnv } = new AppExecutors();
|
||||
const { cloneRepo, pullRepo } = new RepoExecutors();
|
||||
const { systemInfo, restart, update } = new SystemExecutors();
|
||||
const { systemInfo } = new SystemExecutors();
|
||||
|
||||
const event = eventSchema.safeParse(jobData);
|
||||
|
||||
|
@ -61,38 +56,11 @@ const runCommand = async (jobData: unknown) => {
|
|||
if (data.command === 'system_info') {
|
||||
({ success, message } = await systemInfo());
|
||||
}
|
||||
|
||||
if (data.command === 'restart') {
|
||||
({ success, message } = await restart());
|
||||
}
|
||||
|
||||
if (data.command === 'update') {
|
||||
({ success, message } = await update(data.version));
|
||||
}
|
||||
}
|
||||
|
||||
return { success, message };
|
||||
};
|
||||
|
||||
/**
 * Finds and kills any other running watcher processes ("index.js watch" and
 * "runtipi-cli watch") so only a single watcher stays alive.
 * PIDs are discovered by grepping `ps aux`; kill failures are logged and
 * ignored (the target process may have already exited).
 *
 * NOTE(review): the ps|grep pattern could match unrelated processes whose
 * command line contains these strings — confirm this is acceptable here.
 */
export const killOtherWorkers = async () => {
  // `grep -v grep` excludes the grep process itself from the match list
  const { stdout } = await execAsync('ps aux | grep "index.js watch" | grep -v grep | awk \'{print $2}\'');
  const { stdout: stdoutInherit } = await execAsync('ps aux | grep "runtipi-cli watch" | grep -v grep | awk \'{print $2}\'');

  fileLogger.info(`Killing other workers with pids ${stdout} and ${stdoutInherit}`);

  // awk output is newline-separated; drop empty entries from trailing newlines
  const pids = stdout.split('\n').filter((pid: string) => pid !== '');
  const pidsInherit = stdoutInherit.split('\n').filter((pid: string) => pid !== '');

  pids.concat(pidsInherit).forEach((pid) => {
    fileLogger.info(`Killing worker with pid ${pid}`);
    try {
      // process.kill throws if the pid no longer exists or we lack permission
      process.kill(Number(pid));
    } catch (e) {
      fileLogger.error(`Error killing worker with pid ${pid}: ${e}`);
    }
  });
};
|
||||
|
||||
/**
|
||||
* Start the worker for the events queue
|
||||
*/
|
||||
|
@ -100,27 +68,27 @@ export const startWorker = async () => {
|
|||
const worker = new Worker(
|
||||
'events',
|
||||
async (job) => {
|
||||
fileLogger.info(`Processing job ${job.id} with data ${JSON.stringify(job.data)}`);
|
||||
logger.info(`Processing job ${job.id} with data ${JSON.stringify(job.data)}`);
|
||||
const { message, success } = await runCommand(job.data);
|
||||
|
||||
return { success, stdout: message };
|
||||
},
|
||||
{ connection: { host: '127.0.0.1', port: 6379, password: getEnv().redisPassword, connectTimeout: 60000 }, removeOnComplete: { count: 200 }, removeOnFail: { count: 500 } },
|
||||
{ connection: { host: getEnv().redisHost, port: 6379, password: getEnv().redisPassword, connectTimeout: 60000 }, removeOnComplete: { count: 200 }, removeOnFail: { count: 500 } },
|
||||
);
|
||||
|
||||
worker.on('ready', () => {
|
||||
fileLogger.info('Worker is ready');
|
||||
logger.info('Worker is ready');
|
||||
});
|
||||
|
||||
worker.on('completed', (job) => {
|
||||
fileLogger.info(`Job ${job.id} completed with result:`, JSON.stringify(job.returnvalue));
|
||||
logger.info(`Job ${job.id} completed with result:`, JSON.stringify(job.returnvalue));
|
||||
});
|
||||
|
||||
worker.on('failed', (job) => {
|
||||
fileLogger.error(`Job ${job?.id} failed with reason ${job?.failedReason}`);
|
||||
logger.error(`Job ${job?.id} failed with reason ${job?.failedReason}`);
|
||||
});
|
||||
|
||||
worker.on('error', async (e) => {
|
||||
fileLogger.debug(`Worker error: ${e}`);
|
||||
logger.debug(`Worker error: ${e}`);
|
||||
});
|
||||
};
|
Loading…
Reference in a new issue