refactor: move db migrations in cli start command

parent fe3dabce7d
commit ca8e05652c

18 changed files with 145 additions and 1773 deletions

@@ -6,6 +6,7 @@ POSTGRES_PORT=5433
APPS_REPO_ID=repo-id
APPS_REPO_URL=https://test.com/test
REDIS_HOST=localhost
REDIS_PASSWORD=redis
INTERNAL_IP=localhost
TIPI_VERSION=1
JWT_SECRET=secret
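
Annotation (not part of the diff): variables like these end up in the env map that the CLI start command below reads through envMap.get('POSTGRES_PORT'), envMap.get('REDIS_PASSWORD'), and so on. A minimal sketch of how such a map can be built with dotenv; the loadEnvMap helper name is an assumption for illustration, the real CLI builds its map through generateSystemEnvFile/getEnv from its helpers.

// Sketch only, not part of this commit. Builds a Map<string, string> from a .env
// file using dotenv.parse; the helper name is illustrative.
import fs from 'fs';
import dotenv from 'dotenv';

const loadEnvMap = (envFilePath = '.env'): Map<string, string> => {
  const parsed = dotenv.parse(fs.readFileSync(envFilePath));
  return new Map(Object.entries(parsed));
};

// Example: const envMap = loadEnvMap(); envMap.get('POSTGRES_PORT') === '5433'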

@@ -43,6 +43,7 @@
    "vitest": "^0.32.2"
  },
  "dependencies": {
    "@runtipi/postgres-migrations": "^5.3.0",
    "@runtipi/shared": "workspace:^",
    "axios": "^1.4.0",
    "boxen": "^7.1.1",
@@ -53,6 +54,7 @@
    "commander": "^11.0.0",
    "dotenv": "^16.3.1",
    "log-update": "^5.0.1",
    "pg": "^8.11.1",
    "semver": "^7.5.3",
    "systeminformation": "^5.18.7",
    "web-push": "^3.6.3",

@@ -1,3 +1,4 @@
import { Queue } from 'bullmq';
import fs from 'fs';
import cliProgress from 'cli-progress';
import semver from 'semver';
@@ -9,12 +10,14 @@ import si from 'systeminformation';
import { Stream } from 'stream';
import { promisify } from 'util';
import dotenv from 'dotenv';
import { SystemEvent } from '@runtipi/shared';
import { AppExecutors } from '../app/app.executors';
import { copySystemFiles, generateSystemEnvFile, generateTlsCertificates } from './system.helpers';
import { TerminalSpinner } from '@/utils/logger/terminal-spinner';
import { pathExists } from '@/utils/fs-helpers';
import { getEnv } from '@/utils/environment/environment';
import { fileLogger } from '@/utils/logger/file-logger';
import { runPostgresMigrations } from '@/utils/migrations/run-migration';

const execAsync = promisify(exec);

@@ -203,6 +206,32 @@ export class SystemExecutors {
    spinner.done('Watcher started');

    const queue = new Queue('events', { connection: { host: '127.0.0.1', port: 6379, password: envMap.get('REDIS_PASSWORD') } });
    await queue.obliterate({ force: true });

    // Initial jobs
    await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent);
    await queue.add(`${Math.random().toString()}_repo_clone`, { type: 'repo', command: 'clone', url: envMap.get('APPS_REPO_URL') } as SystemEvent);

    // Scheduled jobs
    await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent, { repeat: { pattern: '*/30 * * * *' } });
    await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent, { repeat: { pattern: '* * * * *' } });

    await queue.close();

    spinner.setMessage('Running database migrations...');
    spinner.start();

    await runPostgresMigrations({
      postgresHost: '127.0.0.1',
      postgresDatabase: envMap.get('POSTGRES_DBNAME') as string,
      postgresUsername: envMap.get('POSTGRES_USERNAME') as string,
      postgresPassword: envMap.get('POSTGRES_PASSWORD') as string,
      postgresPort: envMap.get('POSTGRES_PORT') as string,
    });

    spinner.done('Database migrations complete');

    console.log(
      boxen(`Visit: http://${envMap.get('INTERNAL_IP')}:${envMap.get('NGINX_PORT')} to access the dashboard\n\nFind documentation and guides at: https://runtipi.io`, {
        title: 'Tipi successfully started 🎉',
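
Annotation (not part of the diff): the hunk above queues startup and recurring work as BullMQ jobs from the CLI, corresponding to the EventDispatcher calls removed from the server further down. A minimal sketch of the same pattern in isolation, assuming a local Redis on 127.0.0.1:6379; the payload shape is simplified compared to the SystemEvent type from @runtipi/shared, and the function name is illustrative.

// Sketch only: one-off and repeatable BullMQ jobs driven by cron patterns, as used above.
import { Queue } from 'bullmq';

const scheduleStartupJobs = async (redisPassword: string, appsRepoUrl: string) => {
  const queue = new Queue('events', { connection: { host: '127.0.0.1', port: 6379, password: redisPassword } });

  // Remove everything left over from a previous run so repeatable jobs are not duplicated.
  await queue.obliterate({ force: true });

  // One-off job executed once at startup.
  await queue.add('repo_clone', { type: 'repo', command: 'clone', url: appsRepoUrl });

  // Repeatable job: re-enqueued by BullMQ every 30 minutes.
  await queue.add('repo_update', { type: 'repo', command: 'update', url: appsRepoUrl }, { repeat: { pattern: '*/30 * * * *' } });

  await queue.close();
};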

packages/cli/src/utils/migrations/run-migration.ts (new file, 56 lines)

@@ -0,0 +1,56 @@
import path from 'path';
import pg from 'pg';
import { migrate } from '@runtipi/postgres-migrations';
import { fileLogger } from '../logger/file-logger';

type MigrationParams = {
  postgresHost: string;
  postgresDatabase: string;
  postgresUsername: string;
  postgresPassword: string;
  postgresPort: string;
};

export const runPostgresMigrations = async (params: MigrationParams) => {
  const assetsFolder = path.join('/snapshot', 'runtipi', 'packages', 'cli', 'assets');

  const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = params;

  fileLogger.info('Starting database migration');

  fileLogger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);

  const client = new pg.Client({
    user: postgresUsername,
    host: postgresHost,
    database: postgresDatabase,
    password: postgresPassword,
    port: Number(postgresPort),
  });
  await client.connect();

  fileLogger.info('Client connected');

  try {
    const { rows } = await client.query('SELECT * FROM migrations');
    // if rows contains a migration with name 'Initial1657299198975' (legacy typeorm) delete table migrations. As all migrations are idempotent we can safely delete the table and start over.
    if (rows.find((row) => row.name === 'Initial1657299198975')) {
      fileLogger.info('Found legacy migration. Deleting table migrations');
      await client.query('DROP TABLE migrations');
    }
  } catch (e) {
    fileLogger.info('Migrations table not found, creating it');
  }

  fileLogger.info('Running migrations');
  try {
    await migrate({ client }, path.join(assetsFolder, 'migrations'), { skipCreateMigrationTable: true });
  } catch (e) {
    fileLogger.error('Error running migrations. Dropping table migrations and trying again');
    await client.query('DROP TABLE migrations');
    await migrate({ client }, path.join(assetsFolder, 'migrations'), { skipCreateMigrationTable: true });
  }

  fileLogger.info('Migration complete');
  await client.end();
};
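
Annotation (not part of the new file): a usage sketch showing how the start command above invokes this helper with values from the generated env map. The envMap parameter and the migrateAtStartup name are assumptions for illustration; the actual call lives in the SystemExecutors hunk shown earlier.

// Sketch only: invoking the new CLI migration helper, mirroring the start command.
import { runPostgresMigrations } from '@/utils/migrations/run-migration';

const migrateAtStartup = async (envMap: Map<string, string>) => {
  await runPostgresMigrations({
    postgresHost: '127.0.0.1',
    postgresDatabase: envMap.get('POSTGRES_DBNAME') as string,
    postgresUsername: envMap.get('POSTGRES_USERNAME') as string,
    postgresPassword: envMap.get('POSTGRES_PASSWORD') as string,
    postgresPort: envMap.get('POSTGRES_PORT') as string,
  });
};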

pnpm-lock.yaml (1718 lines changed)
File diff suppressed because it is too large.

@@ -4,10 +4,8 @@ import express from 'express';
import { parse } from 'url';

import type { NextServer } from 'next/dist/server/next';
import { EventDispatcher } from './core/EventDispatcher';
import { getConfig, setConfig } from './core/TipiConfig';
import { Logger } from './core/Logger';
import { runPostgresMigrations } from './run-migration';
import { AppServiceClass } from './services/apps/apps.service';
import { db } from './db';
import { sessionMiddleware } from './middlewares/session.middleware';

@@ -61,23 +59,10 @@ nextApp.prepare().then(async () => {
  });

  app.listen(port, async () => {
    await EventDispatcher.clear();
    const appService = new AppServiceClass(db);

    // Run database migrations
    if (getConfig().NODE_ENV !== 'development') {
      await runPostgresMigrations();
    }
    setConfig('status', 'RUNNING');

    // Clone and update apps repo
    await EventDispatcher.dispatchEventAsync({ type: 'repo', command: 'clone', url: getConfig().appsRepoUrl });
    await EventDispatcher.dispatchEventAsync({ type: 'repo', command: 'update', url: getConfig().appsRepoUrl });

    // Scheduled events
    EventDispatcher.scheduleEvent({ type: 'repo', command: 'update', url: getConfig().appsRepoUrl }, '*/30 * * * *');
    EventDispatcher.scheduleEvent({ type: 'system', command: 'system_info' }, '* * * * *');

    appService.startAllApps();

    Logger.info(`> Server listening at http://localhost:${port} as ${dev ? 'development' : process.env.NODE_ENV}`);

@@ -1,47 +0,0 @@
import path from 'path';
import pg from 'pg';
import { migrate } from '@runtipi/postgres-migrations';
import { Logger } from './core/Logger';
import { getConfig } from './core/TipiConfig';

export const runPostgresMigrations = async (dbName?: string) => {
  Logger.info('Starting database migration');

  const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getConfig();

  Logger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);

  const client = new pg.Client({
    user: postgresUsername,
    host: postgresHost,
    database: dbName || postgresDatabase,
    password: postgresPassword,
    port: Number(postgresPort),
  });
  await client.connect();

  Logger.info('Client connected');

  try {
    const { rows } = await client.query('SELECT * FROM migrations');
    // if rows contains a migration with name 'Initial1657299198975' (legacy typeorm) delete table migrations. As all migrations are idempotent we can safely delete the table and start over.
    if (rows.find((row) => row.name === 'Initial1657299198975')) {
      Logger.info('Found legacy migration. Deleting table migrations');
      await client.query('DROP TABLE migrations');
    }
  } catch (e) {
    Logger.info('Migrations table not found, creating it');
  }

  Logger.info('Running migrations');
  try {
    await migrate({ client }, path.join(__dirname, 'migrations'), { skipCreateMigrationTable: true });
  } catch (e) {
    Logger.error('Error running migrations. Dropping table migrations and trying again');
    await client.query('DROP TABLE migrations');
    await migrate({ client }, path.join(__dirname, 'migrations'), { skipCreateMigrationTable: true });
  }

  Logger.info('Migration complete');
  await client.end();
};

@@ -1,4 +1,50 @@
import { runPostgresMigrations } from './run-migration';
import path from 'path';
import pg from 'pg';
import { migrate } from '@runtipi/postgres-migrations';
import { Logger } from './core/Logger';
import { getConfig } from './core/TipiConfig';

export const runPostgresMigrations = async (dbName?: string) => {
  Logger.info('Starting database migration');

  const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getConfig();

  Logger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);

  const client = new pg.Client({
    user: postgresUsername,
    host: postgresHost,
    database: dbName || postgresDatabase,
    password: postgresPassword,
    port: Number(postgresPort),
  });
  await client.connect();

  Logger.info('Client connected');

  try {
    const { rows } = await client.query('SELECT * FROM migrations');
    // if rows contains a migration with name 'Initial1657299198975' (legacy typeorm) delete table migrations. As all migrations are idempotent we can safely delete the table and start over.
    if (rows.find((row) => row.name === 'Initial1657299198975')) {
      Logger.info('Found legacy migration. Deleting table migrations');
      await client.query('DROP TABLE migrations');
    }
  } catch (e) {
    Logger.info('Migrations table not found, creating it');
  }

  Logger.info('Running migrations');
  try {
    await migrate({ client }, path.join(__dirname, '../../packages/cli/assets/migrations'), { skipCreateMigrationTable: true });
  } catch (e) {
    Logger.error('Error running migrations. Dropping table migrations and trying again');
    await client.query('DROP TABLE migrations');
    await migrate({ client }, path.join(__dirname, '../../packages/cli/assets/migrations'), { skipCreateMigrationTable: true });
  }

  Logger.info('Migration complete');
  await client.end();
};

const main = async () => {
  await runPostgresMigrations();
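
Annotation (not part of the diff, which is truncated in this view): the dev script now inlines the migration logic and ends with a main() wrapper. A sketch of what a self-contained entrypoint around runPostgresMigrations typically looks like; the exit-code handling is an assumption and not taken from the commit.

// Sketch only: standalone entrypoint around runPostgresMigrations (defined above).
// Exit codes are illustrative assumptions.
const main = async () => {
  try {
    await runPostgresMigrations();
    process.exit(0);
  } catch (e) {
    console.error(e);
    process.exit(1);
  }
};

main();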

@@ -1,10 +1,10 @@
/* eslint-disable no-restricted-syntax */
import pg, { Pool } from 'pg';
import { drizzle } from 'drizzle-orm/node-postgres';
import { runPostgresMigrations } from '../run-migration';
import { getConfig } from '../core/TipiConfig';
import * as schema from '../db/schema';
import { Database } from '../db';
import { runPostgresMigrations } from '../run-migrations-dev';

export type TestDatabase = {
  client: Pool;