Merge pull request #212 from meienberger/feature/inapp-config
feature: runtime config
Commit: a8db7bd179
47 changed files with 708 additions and 355 deletions
.github/workflows/ci.yml (2 changes, vendored)

@@ -3,7 +3,7 @@ on:
   push:

 env:
-  ROOT_FOLDER: /test
+  ROOT_FOLDER: /runtipi
   JWT_SECRET: "secret"
   ROOT_FOLDER_HOST: /tipi
   APPS_REPO_ID: repo-id

.gitignore (1 change, vendored)

@@ -1,6 +1,7 @@
 *.swo
 *.swp

+logs
 .pnpm-debug.log
 .env*
 github.secrets

@@ -1,9 +1,22 @@
-FROM alpine:3.16.0 as app
+FROM ubuntu:22.04

 WORKDIR /

+RUN apt-get update
 # Install docker
-RUN apk --no-cache add docker-compose nodejs npm bash g++ make git
+RUN apt-get install -y ca-certificates curl gnupg lsb-release
+RUN mkdir -p /etc/apt/keyrings
+RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg
+RUN echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list >/dev/null
+RUN apt-get update
+RUN apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
+
+# Install node
+RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
+RUN apt-get install -y nodejs
+
+# Install dependencies
+RUN apt-get install -y bash g++ make git

 RUN npm install node-gyp -g
@@ -53,14 +53,14 @@ services:
     volumes:
       ## Docker sock
       - /var/run/docker.sock:/var/run/docker.sock:ro
-      - ${PWD}:/tipi
+      - ${PWD}:/runtipi
       - ${PWD}/packages/system-api/src:/api/src
       - ${PWD}/logs:/api/logs
       # - /api/node_modules
     environment:
       INTERNAL_IP: ${INTERNAL_IP}
       TIPI_VERSION: ${TIPI_VERSION}
       JWT_SECRET: ${JWT_SECRET}
       ROOT_FOLDER_HOST: ${ROOT_FOLDER_HOST}
       NGINX_PORT: ${NGINX_PORT}
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
       POSTGRES_USERNAME: tipi

@@ -46,12 +46,12 @@ services:
     volumes:
       ## Docker sock
       - /var/run/docker.sock:/var/run/docker.sock:ro
-      - ${PWD}:/tipi
+      - ${PWD}:/runtipi
       - ${PWD}/logs:/api/logs
     environment:
       INTERNAL_IP: ${INTERNAL_IP}
       TIPI_VERSION: ${TIPI_VERSION}
       JWT_SECRET: ${JWT_SECRET}
       ROOT_FOLDER_HOST: ${ROOT_FOLDER_HOST}
       NGINX_PORT: ${NGINX_PORT}
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
       POSTGRES_USERNAME: tipi

@@ -46,12 +46,12 @@ services:
     volumes:
       ## Docker sock
       - /var/run/docker.sock:/var/run/docker.sock:ro
-      - ${PWD}:/tipi
+      - ${PWD}:/runtipi
       - ${PWD}/logs:/api/logs
     environment:
       INTERNAL_IP: ${INTERNAL_IP}
       TIPI_VERSION: ${TIPI_VERSION}
       JWT_SECRET: ${JWT_SECRET}
       ROOT_FOLDER_HOST: ${ROOT_FOLDER_HOST}
       NGINX_PORT: ${NGINX_PORT}
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
       POSTGRES_USERNAME: tipi

media/data/music/.gitkeep (new empty file)

@@ -1,5 +1,5 @@
 /** @type {import('next').NextConfig} */
-const { INTERNAL_IP, DOMAIN } = process.env;
+const { INTERNAL_IP, DOMAIN, NGINX_PORT } = process.env;

 const nextConfig = {
   webpackDevMiddleware: (config) => {
@@ -11,8 +11,9 @@ const nextConfig = {
   },
   reactStrictMode: true,
   env: {
-    INTERNAL_IP: INTERNAL_IP,
+    NEXT_PUBLIC_INTERNAL_IP: INTERNAL_IP,
     NEXT_PUBLIC_DOMAIN: DOMAIN,
+    NEXT_PUBLIC_PORT: NGINX_PORT,
   },
   basePath: '/dashboard',
 };
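Since the dashboard now reads these values through the Next.js `env` injection rather than an API call, anything exposed as NEXT_PUBLIC_* is inlined into the client bundle at build time. A minimal sketch of how a consumer could derive its API base URL from them (illustrative only; the helper name and fallback logic are assumptions, not code from this PR):

// Illustrative sketch, not part of the PR. Next.js replaces these
// process.env.NEXT_PUBLIC_* expressions with literal values at build time.
export const getApiBase = (): string => {
  const ip = process.env.NEXT_PUBLIC_INTERNAL_IP; // e.g. "192.168.1.10"
  const port = process.env.NEXT_PUBLIC_PORT;      // NGINX_PORT forwarded via next.config.js
  const domain = process.env.NEXT_PUBLIC_DOMAIN;

  // Assumed preference: use the public domain when one is configured.
  return domain && domain !== 'tipi.localhost' ? `https://${domain}/api` : `http://${ip}:${port}/api`;
};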
@@ -1,10 +1,5 @@
 export const getUrl = (url: string) => {
-  const domain = process.env.NEXT_PUBLIC_DOMAIN;
-  let prefix = '';
-
-  if (domain !== 'tipi.localhost') {
-    prefix = 'dashboard';
-  }
+  let prefix = 'dashboard';

   return `/${prefix}/${url}`;
 };

@@ -1,22 +1,18 @@
 import { useEffect, useState } from 'react';
 import { ApolloClient } from '@apollo/client';
-import axios from 'axios';
-import useSWR, { BareFetcher } from 'swr';
 import { createApolloClient } from '../core/apollo/client';
 import { useSytemStore } from '../state/systemStore';
-import { getUrl } from '../core/helpers/url-helpers';

 interface IReturnProps {
   client?: ApolloClient<unknown>;
   isLoadingComplete?: boolean;
 }

-const fetcher: BareFetcher<any> = (url: string) => {
-  return axios.get(getUrl(url)).then((res) => res.data);
-};

 export default function useCachedResources(): IReturnProps {
-  const { data } = useSWR<{ ip: string; domain: string; port: string }>('api/ip', fetcher);
+  const ip = process.env.NEXT_PUBLIC_INTERNAL_IP;
+  const domain = process.env.NEXT_PUBLIC_DOMAIN;
+  const port = process.env.NEXT_PUBLIC_PORT;

   const { baseUrl, setBaseUrl, setInternalIp, setDomain } = useSytemStore();
   const [isLoadingComplete, setLoadingComplete] = useState(false);
   const [client, setClient] = useState<ApolloClient<unknown>>();
@@ -35,7 +31,6 @@ export default function useCachedResources(): IReturnProps {
   }

   useEffect(() => {
-    const { ip, domain, port } = data || {};
     if (ip && !baseUrl) {
       setInternalIp(ip);
       setDomain(domain);
@@ -50,7 +45,7 @@ export default function useCachedResources(): IReturnProps {
         setBaseUrl(`https://${domain}/api`);
       }
     }
-  }, [baseUrl, setBaseUrl, data, setInternalIp, setDomain]);
+  }, [baseUrl, setBaseUrl, setInternalIp, setDomain, ip, domain, port]);

   useEffect(() => {
     if (baseUrl) {
Deleted file (the dashboard API route that served the IP/domain/port):

@@ -1,7 +0,0 @@
export default function ip(_: any, res: any) {
  const { INTERNAL_IP } = process.env;
  const { NGINX_PORT } = process.env;
  const { DOMAIN } = process.env;

  res.status(200).json({ ip: INTERNAL_IP, domain: DOMAIN, port: NGINX_PORT });
}

packages/system-api/__mocks__/node-cron.ts (new file, 11 lines)

@@ -0,0 +1,11 @@
const cron: {
  schedule: typeof schedule;
} = jest.genMockFromModule('node-cron');

const schedule = (scd: string, cb: () => void) => {
  cb();
};

cron.schedule = schedule;

module.exports = cron;
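Because this stub lives in a package-level __mocks__ folder, a test that calls jest.mock('node-cron') gets it instead of the real scheduler, and the callback passed to schedule() runs immediately. A small illustration of the pattern (assumed test code, not part of the diff):

// Illustration only: how the manual mock above behaves in a test.
import cron from 'node-cron';

jest.mock('node-cron'); // resolved from packages/system-api/__mocks__/node-cron.ts

it('runs scheduled work immediately under the mock', () => {
  const work = jest.fn();
  cron.schedule('* * * * *', work); // the mocked schedule() invokes the callback right away
  expect(work).toHaveBeenCalledTimes(1);
});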
@@ -7,9 +7,15 @@ module.exports = {
   setupFiles: ['<rootDir>/src/test/dotenv-config.ts'],
   setupFilesAfterEnv: ['<rootDir>/src/test/jest-setup.ts'],
   collectCoverage: true,
-  collectCoverageFrom: ['src/**/*.{ts,tsx}', '!src/**/migrations/**/*.{ts,tsx}', '!**/config/**/*.{ts,tsx}', '!**/__tests__/**'],
+  collectCoverageFrom: ['src/**/*.{ts,tsx}', '!src/**/migrations/**/*.{ts,tsx}', '!**/src/config/**/*.{ts,tsx}', '!**/__tests__/**'],
   passWithNoTests: true,
   transform: {
     '^.+\\.graphql$': 'graphql-import-node/jest',
   },
+  globals: {
+    // NODE_ENV: 'test',
+    'ts-jest': {
+      isolatedModules: true,
+    },
+  },
 };

@@ -56,7 +56,8 @@
     "type-graphql": "^1.1.1",
     "typeorm": "^0.3.6",
     "validator": "^13.7.0",
-    "winston": "^3.7.2"
+    "winston": "^3.7.2",
+    "zod": "^3.19.1"
   },
   "devDependencies": {
     "@faker-js/faker": "^7.3.0",
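zod is the only new runtime dependency and is what lets TipiConfig validate values coming from the environment and from settings.json. A standalone illustration of the parse-or-throw behaviour the config relies on (not code from the PR):

// Standalone zod illustration, not code from the PR.
import { z } from 'zod';

const schema = z.object({
  NODE_ENV: z.union([z.literal('development'), z.literal('production'), z.literal('test')]),
  domain: z.string(),
});

schema.parse({ NODE_ENV: 'test', domain: 'example.com' }); // returns a typed object
schema.parse({ NODE_ENV: 'test', domain: 10 });            // throws a ZodError at runtime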
Deleted file (the static config module replaced by TipiConfig):

@@ -1,58 +0,0 @@
import * as dotenv from 'dotenv';

interface IConfig {
  logs: {
    LOGS_FOLDER: string;
    LOGS_APP: string;
    LOGS_ERROR: string;
  };
  NODE_ENV: string;
  ROOT_FOLDER: string;
  JWT_SECRET: string;
  CLIENT_URLS: string[];
  VERSION: string;
  ROOT_FOLDER_HOST: string;
  APPS_REPO_ID: string;
  APPS_REPO_URL: string;
  INTERNAL_IP: string;
}

if (process.env.NODE_ENV !== 'production') {
  dotenv.config({ path: '.env.dev' });
} else {
  dotenv.config({ path: '.env' });
}

const {
  LOGS_FOLDER = 'logs',
  LOGS_APP = 'app.log',
  LOGS_ERROR = 'error.log',
  NODE_ENV = 'development',
  JWT_SECRET = '',
  INTERNAL_IP = '',
  TIPI_VERSION = '',
  ROOT_FOLDER_HOST = '',
  NGINX_PORT = '80',
  APPS_REPO_ID = '',
  APPS_REPO_URL = '',
  DOMAIN = '',
} = process.env;

const config: IConfig = {
  logs: {
    LOGS_FOLDER,
    LOGS_APP,
    LOGS_ERROR,
  },
  NODE_ENV,
  ROOT_FOLDER: '/tipi',
  JWT_SECRET,
  CLIENT_URLS: ['http://localhost:3000', `http://${INTERNAL_IP}`, `http://${INTERNAL_IP}:${NGINX_PORT}`, `http://${INTERNAL_IP}:3000`, `https://${DOMAIN}`],
  VERSION: TIPI_VERSION,
  ROOT_FOLDER_HOST,
  APPS_REPO_ID,
  APPS_REPO_URL,
  INTERNAL_IP,
};

export default config;

Deleted file (the barrel re-export of the old config):

@@ -1 +0,0 @@
export { default } from './config';

@@ -1,13 +1,15 @@
 import fs from 'fs-extra';
 import path from 'path';
 import { createLogger, format, transports } from 'winston';
-import config from '..';
+import { getConfig } from '../../core/config/TipiConfig';
+
+const { logs, NODE_ENV } = getConfig();

 const { align, printf, timestamp, combine, colorize } = format;

 // Create the logs directory if it does not exist
-if (!fs.existsSync(config.logs.LOGS_FOLDER)) {
-  fs.mkdirSync(config.logs.LOGS_FOLDER);
+if (!fs.existsSync(logs.LOGS_FOLDER)) {
+  fs.mkdirSync(logs.LOGS_FOLDER);
 }

 /**
@@ -36,14 +38,14 @@ const Logger = createLogger({
     // - Write all logs error (and below) to `error.log`.
     //
     new transports.File({
-      filename: path.join(config.logs.LOGS_FOLDER, config.logs.LOGS_ERROR),
+      filename: path.join(logs.LOGS_FOLDER, logs.LOGS_ERROR),
       level: 'error',
     }),
     new transports.File({
-      filename: path.join(config.logs.LOGS_FOLDER, config.logs.LOGS_APP),
+      filename: path.join(logs.LOGS_FOLDER, logs.LOGS_APP),
     }),
   ],
-  exceptionHandlers: [new transports.File({ filename: path.join(config.logs.LOGS_FOLDER, config.logs.LOGS_ERROR) })],
+  exceptionHandlers: [new transports.File({ filename: path.join(logs.LOGS_FOLDER, logs.LOGS_ERROR) })],
 });

 //
@@ -59,4 +61,4 @@ const LoggerDev = createLogger({
   ],
 });

-export default config.NODE_ENV === 'production' ? Logger : LoggerDev;
+export default NODE_ENV === 'production' ? Logger : LoggerDev;
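The exported logger keeps its winston API; only the source of its settings changes, and `logs`/`NODE_ENV` are now read from getConfig() once when the module is first imported. A minimal consumer sketch (import path assumed, not from the diff):

// Minimal usage sketch, import path assumed.
import Logger from '../../config/logger/logger';

const reason = 'timeout';
Logger.info('Starting cron jobs...');
Logger.error(`Error updating repo: ${reason}`);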
@@ -1,5 +1,5 @@
-import config from '../config';
+import { getConfig } from '../core/config/TipiConfig';

 export const APP_DATA_FOLDER = 'app-data';
 export const APPS_FOLDER = 'apps';
-export const isProd = config.NODE_ENV === 'production';
+export const isProd = getConfig().NODE_ENV === 'production';

packages/system-api/src/core/config/TipiConfig.ts (new file, 112 lines)

@@ -0,0 +1,112 @@
import { z } from 'zod';
import * as dotenv from 'dotenv';
import fs from 'fs-extra';
import { readJsonFile } from '../../modules/fs/fs.helpers';

if (process.env.NODE_ENV !== 'production') {
  dotenv.config({ path: '.env.dev' });
} else {
  dotenv.config({ path: '.env' });
}
const {
  LOGS_FOLDER = 'logs',
  LOGS_APP = 'app.log',
  LOGS_ERROR = 'error.log',
  NODE_ENV = 'development',
  JWT_SECRET = '',
  INTERNAL_IP = '',
  TIPI_VERSION = '',
  NGINX_PORT = '80',
  APPS_REPO_ID = '',
  APPS_REPO_URL = '',
  DOMAIN = '',
} = process.env;

const configSchema = z.object({
  NODE_ENV: z.union([z.literal('development'), z.literal('production'), z.literal('test')]),
  logs: z.object({
    LOGS_FOLDER: z.string(),
    LOGS_APP: z.string(),
    LOGS_ERROR: z.string(),
  }),
  dnsIp: z.string(),
  rootFolder: z.string(),
  internalIp: z.string(),
  version: z.string(),
  jwtSecret: z.string(),
  clientUrls: z.array(z.string()),
  appsRepoId: z.string(),
  appsRepoUrl: z.string(),
  domain: z.string(),
});

class Config {
  private static instance: Config;

  private config: z.infer<typeof configSchema>;

  constructor() {
    const envConfig: z.infer<typeof configSchema> = {
      logs: {
        LOGS_FOLDER,
        LOGS_APP,
        LOGS_ERROR,
      },
      NODE_ENV: NODE_ENV as z.infer<typeof configSchema>['NODE_ENV'],
      rootFolder: '/runtipi',
      internalIp: INTERNAL_IP,
      version: TIPI_VERSION,
      jwtSecret: JWT_SECRET,
      clientUrls: ['http://localhost:3000', `http://${INTERNAL_IP}`, `http://${INTERNAL_IP}:${NGINX_PORT}`, `http://${INTERNAL_IP}:3000`, DOMAIN && `https://${DOMAIN}`].filter(Boolean),
      appsRepoId: APPS_REPO_ID,
      appsRepoUrl: APPS_REPO_URL,
      domain: DOMAIN,
      dnsIp: '9.9.9.9',
    };

    const parsed = configSchema.parse({
      ...envConfig,
    });

    this.config = parsed;
  }

  public static getInstance(): Config {
    if (!Config.instance) {
      Config.instance = new Config();
    }
    return Config.instance;
  }

  public getConfig() {
    return this.config;
  }

  public applyJsonConfig() {
    const fileConfig = readJsonFile('/state/settings.json');

    const parsed = configSchema.parse({
      ...this.config,
      ...fileConfig,
    });

    this.config = parsed;
  }

  public setConfig(key: keyof typeof configSchema.shape, value: any) {
    const newConf = { ...this.getConfig() };
    newConf[key] = value;

    this.config = configSchema.parse(newConf);

    fs.writeFileSync(`${this.config.rootFolder}/state/settings.json`, JSON.stringify(newConf));
  }
}

export const setConfig = (key: keyof typeof configSchema.shape, value: any) => {
  Config.getInstance().setConfig(key, value);
};

export const getConfig = () => Config.getInstance().getConfig();

export const applyJsonConfig = () => Config.getInstance().applyJsonConfig();
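Config is a lazily created, process-wide singleton, so every part of the API sees the same object. A hedged usage sketch of the three exported helpers (the import path and the example repo URL are assumptions, not code from this PR):

// Usage sketch only; paths and values are illustrative.
import { applyJsonConfig, getConfig, setConfig } from './core/config/TipiConfig';

// Read values anywhere instead of importing the old static config object.
console.log(getConfig().rootFolder);   // "/runtipi"
console.log(getConfig().appsRepoUrl);

// Overlay <rootFolder>/state/settings.json; the merge is re-validated by zod
// and throws a ZodError if the file contains an invalid value.
applyJsonConfig();

// Change one key at runtime; setConfig() validates the result and persists it
// back to state/settings.json.
setConfig('appsRepoUrl', 'https://example.com/my-apps-repo.git');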
New test file for TipiConfig (83 lines):

@@ -0,0 +1,83 @@
import { faker } from '@faker-js/faker';
import fs from 'fs-extra';
import { applyJsonConfig, getConfig, setConfig } from '../TipiConfig';

jest.mock('fs-extra');

beforeEach(async () => {
  jest.resetModules();
  jest.resetAllMocks();
});

describe('Test: getConfig', () => {
  it('It should return config from .env', () => {
    const config = getConfig();

    expect(config).toBeDefined();
    expect(config.NODE_ENV).toBe('test');
    expect(config.logs.LOGS_FOLDER).toBe('logs');
    expect(config.logs.LOGS_APP).toBe('app.log');
    expect(config.logs.LOGS_ERROR).toBe('error.log');
    expect(config.dnsIp).toBe('9.9.9.9');
    expect(config.rootFolder).toBe('/runtipi');
    expect(config.internalIp).toBe('192.168.1.10');
  });
});

describe('Test: setConfig', () => {
  it('It should be able set config', () => {
    const randomWord = faker.random.word();
    setConfig('appsRepoUrl', randomWord);
    const config = getConfig();

    expect(config).toBeDefined();
    expect(config.appsRepoUrl).toBe(randomWord);
  });

  it('Should not be able to set invalid NODE_ENV', () => {
    expect(() => setConfig('NODE_ENV', 'invalid')).toThrow();
  });
});

describe('Test: applyJsonConfig', () => {
  it('It should be able to apply json config', () => {
    const settingsJson = {
      appsRepoUrl: faker.random.word(),
      appsRepoId: faker.random.word(),
      domain: faker.random.word(),
    };

    const MockFiles = {
      '/runtipi/state/settings.json': JSON.stringify(settingsJson),
    };

    // @ts-ignore
    fs.__createMockFiles(MockFiles);

    applyJsonConfig();
    const config = getConfig();

    expect(config).toBeDefined();

    expect(config.appsRepoUrl).toBe(settingsJson.appsRepoUrl);
    expect(config.appsRepoId).toBe(settingsJson.appsRepoId);
    expect(config.domain).toBe(settingsJson.domain);
  });

  it('Should not be able to apply an invalid value from json config', () => {
    const settingsJson = {
      appsRepoUrl: faker.random.word(),
      appsRepoId: faker.random.word(),
      domain: 10,
    };

    const MockFiles = {
      '/runtipi/state/settings.json': JSON.stringify(settingsJson),
    };

    // @ts-ignore
    fs.__createMockFiles(MockFiles);

    expect(() => applyJsonConfig()).toThrow();
  });
});

packages/system-api/src/core/jobs/__tests__/jobs.test.ts (new file, 32 lines)

@@ -0,0 +1,32 @@
import cron from 'node-cron';
import * as repoHelpers from '../../../helpers/repo-helpers';
import { getConfig } from '../../config/TipiConfig';
import startJobs from '../jobs';

jest.mock('node-cron');
jest.mock('child_process');

beforeEach(async () => {
  jest.resetModules();
  jest.resetAllMocks();
});

describe('Test: startJobs', () => {
  it('Should start cron jobs', () => {
    const spy = jest.spyOn(cron, 'schedule');

    startJobs();
    expect(spy).toHaveBeenCalled();
    expect(spy).toHaveBeenCalledWith('0 * * * *', expect.any(Function));
    spy.mockRestore();
  });

  it('Should update apps repo on cron trigger', () => {
    const spy = jest.spyOn(repoHelpers, 'updateRepo');

    startJobs();

    expect(spy).toHaveBeenCalledWith(getConfig().appsRepoUrl);
    spy.mockRestore();
  });
});

@@ -1,14 +1,14 @@
 import cron from 'node-cron';
-import config from '../../config';
 import logger from '../../config/logger/logger';
 import { updateRepo } from '../../helpers/repo-helpers';
+import { getConfig } from '../../core/config/TipiConfig';

 const startJobs = () => {
   logger.info('Starting cron jobs...');

   cron.schedule('0 * * * *', () => {
-    logger.info('Cloning apps repo...');
-    updateRepo(config.APPS_REPO_URL);
+    logger.info('Updating apps repo...');
+    updateRepo(getConfig().appsRepoUrl);
   });
 };
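For readers unfamiliar with cron syntax, '0 * * * *' means minute 0 of every hour, so the apps repo is refreshed hourly. Purely as a reminder of the field order (illustrative, not new code in the PR):

// node-cron expression reminder, illustrative only.
// fields: minute hour day-of-month month day-of-week
// '0 * * * *' -> at minute 0 of every hour, every day.
import cron from 'node-cron';

cron.schedule('0 * * * *', () => {
  // hourly work
});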
@@ -1,7 +1,7 @@
 import session from 'express-session';
-import config from '../../config';
 import SessionFileStore from 'session-file-store';
 import { COOKIE_MAX_AGE, __prod__ } from '../../config/constants/constants';
+import { getConfig } from '../config/TipiConfig';

 const getSessionMiddleware = () => {
   const FileStore = SessionFileStore(session);
@@ -12,7 +12,7 @@ const getSessionMiddleware = () => {
     name: 'qid',
     store: new FileStore(),
     cookie: { maxAge: COOKIE_MAX_AGE, secure: false, sameSite, httpOnly: true },
-    secret: config.JWT_SECRET,
+    secret: getConfig().jwtSecret,
     resave: false,
     saveUninitialized: false,
   });
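The session secret now comes from getConfig().jwtSecret instead of the deleted static config. As a reference, this is roughly what those express-session options mean (illustrative, with a placeholder secret; COOKIE_MAX_AGE is assumed to be on the order of a day):

// Illustrative express-session setup mirroring the options above.
import session from 'express-session';

const sessionMiddleware = session({
  name: 'qid',                          // cookie name sent to the browser
  secret: 'use getConfig().jwtSecret here',
  cookie: { maxAge: 1000 * 60 * 60 * 24, secure: false, httpOnly: true },
  resave: false,
  saveUninitialized: false,
});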
@@ -6,6 +6,7 @@ import { AppInfo, AppStatusEnum } from '../../../modules/apps/apps.types';
 import { createApp } from '../../../modules/apps/__tests__/apps.factory';
 import Update, { UpdateStatusEnum } from '../../../modules/system/update.entity';
 import { setupConnection, teardownConnection } from '../../../test/connection';
+import { getConfig } from '../../config/TipiConfig';
 import { updateV040 } from '../v040';

 jest.mock('fs');
@@ -61,7 +62,7 @@ describe('No state/apps.json', () => {
 describe('State/apps.json exists with no installed app', () => {
   beforeEach(async () => {
     const { MockFiles } = await createApp({});
-    MockFiles['/tipi/state/apps.json'] = createState([]);
+    MockFiles[`${getConfig().rootFolder}/state/apps.json`] = createState([]);
     // @ts-ignore
     fs.__createMockFiles(MockFiles);
   });
@@ -79,7 +80,7 @@ describe('State/apps.json exists with no installed app', () => {

   it('Should delete state file after update', async () => {
     await updateV040();
-    expect(fs.existsSync('/tipi/state/apps.json')).toBe(false);
+    expect(fs.existsSync(`${getConfig().rootFolder}/state/apps.json`)).toBe(false);
   });
 });

@@ -88,9 +89,9 @@ describe('State/apps.json exists with one installed app', () => {
   beforeEach(async () => {
     const { MockFiles, appInfo } = await createApp({});
     app1 = appInfo;
-    MockFiles['/tipi/state/apps.json'] = createState([appInfo.id]);
-    MockFiles[`/tipi/app-data/${appInfo.id}`] = '';
-    MockFiles[`/tipi/app-data/${appInfo.id}/app.env`] = 'TEST=test\nAPP_PORT=3000\nTEST_FIELD=test';
+    MockFiles[`${getConfig().rootFolder}/state/apps.json`] = createState([appInfo.id]);
+    MockFiles[`${getConfig().rootFolder}/app-data/${appInfo.id}`] = '';
+    MockFiles[`${getConfig().rootFolder}/app-data/${appInfo.id}/app.env`] = 'TEST=test\nAPP_PORT=3000\nTEST_FIELD=test';
     // @ts-ignore
     fs.__createMockFiles(MockFiles);
   });
@@ -117,9 +118,9 @@ describe('State/apps.json exists with one installed app', () => {
   it('Should not try to migrate app if it already exists', async () => {
     const { MockFiles, appInfo } = await createApp({ installed: true });
     app1 = appInfo;
-    MockFiles['/tipi/state/apps.json'] = createState([appInfo.id]);
-    MockFiles[`/tipi/app-data/${appInfo.id}`] = '';
-    MockFiles[`/tipi/app-data/${appInfo.id}/app.env`] = 'TEST=test\nAPP_PORT=3000\nTEST_FIELD=test';
+    MockFiles[`${getConfig().rootFolder}/state/apps.json`] = createState([appInfo.id]);
+    MockFiles[`${getConfig().rootFolder}/app-data/${appInfo.id}`] = '';
+    MockFiles[`${getConfig().rootFolder}/app-data/${appInfo.id}/app.env`] = 'TEST=test\nAPP_PORT=3000\nTEST_FIELD=test';
     // @ts-ignore
     fs.__createMockFiles(MockFiles);

@@ -1,10 +1,10 @@
-import datasource from '../../config/datasource';
+import { DataSource } from 'typeorm';
 import logger from '../../config/logger/logger';
 import App from '../../modules/apps/app.entity';
 import User from '../../modules/auth/user.entity';
 import Update from '../../modules/system/update.entity';

-const recover = async () => {
+const recover = async (datasource: DataSource) => {
   logger.info('Recovering broken database');

   const queryRunner = datasource.createQueryRunner();
@@ -33,9 +33,10 @@ const recover = async () => {
     await Update.create(update).save();
   }

-  logger.info('Users recovered', users.length);
-  logger.info('Apps recovered', apps.length);
-  logger.info('Database recovered');
+  logger.info(`Users recovered ${users.length}`);
+  logger.info(`Apps recovered ${apps.length}`);
+  logger.info(`Updates recovered ${updates.length}`);
+  logger.info('Database fully recovered');
 };

 export default recover;

@@ -1,10 +1,10 @@
-import config from '../../config';
 import logger from '../../config/logger/logger';
 import App from '../../modules/apps/app.entity';
 import { AppInfo, AppStatusEnum } from '../../modules/apps/apps.types';
 import User from '../../modules/auth/user.entity';
 import { deleteFolder, fileExists, readFile, readJsonFile } from '../../modules/fs/fs.helpers';
 import Update, { UpdateStatusEnum } from '../../modules/system/update.entity';
+import { getConfig } from '../config/TipiConfig';

 type AppsState = { installed: string };

@@ -39,7 +39,7 @@ export const updateV040 = async (): Promise<void> => {

     const form: Record<string, string> = {};

-    const configFile: AppInfo | null = readJsonFile(`/repos/${config.APPS_REPO_ID}/apps/${appId}/config.json`);
+    const configFile: AppInfo | null = readJsonFile(`/repos/${getConfig().appsRepoId}/apps/${appId}/config.json`);
     configFile?.form_fields?.forEach((field) => {
       const envVar = field.env_variable;
       const envVarValue = envVarsMap.get(envVar);
@@ -70,7 +70,6 @@ export const updateV040 = async (): Promise<void> => {
     await Update.create({ name: UPDATE_NAME, status: UpdateStatusEnum.SUCCESS }).save();
   } catch (error) {
     logger.error(error);
-    console.error(error);
     await Update.create({ name: UPDATE_NAME, status: UpdateStatusEnum.FAILED }).save();
   }
 };

packages/system-api/src/helpers/__tests__/repo-helpers.test.ts (new file, 100 lines)

@@ -0,0 +1,100 @@
import { faker } from '@faker-js/faker';
import childProcess from 'child_process';
import logger from '../../config/logger/logger';
import { getConfig } from '../../core/config/TipiConfig';
import { cloneRepo, updateRepo } from '../repo-helpers';

jest.mock('child_process');

beforeEach(async () => {
  jest.resetModules();
  jest.resetAllMocks();
});

describe('Test: updateRepo', () => {
  it('Should run update script', async () => {
    const log = jest.spyOn(logger, 'info');
    const spy = jest.spyOn(childProcess, 'execFile');
    const url = faker.internet.url();
    const stdout = faker.random.words();

    // @ts-ignore
    spy.mockImplementation((_path, _args, _, cb) => {
      // @ts-ignore
      if (cb) cb(null, stdout, null);
    });

    await updateRepo(url);

    expect(spy).toHaveBeenCalledWith(`${getConfig().rootFolder}/scripts/git.sh`, ['update', url], {}, expect.any(Function));
    expect(log).toHaveBeenCalledWith(`Update result: ${stdout}`);
    spy.mockRestore();
  });

  it('Should throw and log error if script failed', async () => {
    const url = faker.internet.url();

    const log = jest.spyOn(logger, 'error');
    const spy = jest.spyOn(childProcess, 'execFile');

    const randomWord = faker.random.word();

    // @ts-ignore
    spy.mockImplementation((_path, _args, _, cb) => {
      // @ts-ignore
      if (cb) cb(randomWord, null, null);
    });

    try {
      await updateRepo(url);
    } catch (e) {
      expect(e).toBe(randomWord);
      expect(log).toHaveBeenCalledWith(`Error updating repo: ${randomWord}`);
    }
    spy.mockRestore();
  });
});

describe('Test: cloneRepo', () => {
  it('Should run clone script', async () => {
    const log = jest.spyOn(logger, 'info');
    const spy = jest.spyOn(childProcess, 'execFile');
    const url = faker.internet.url();
    const stdout = faker.random.words();

    // @ts-ignore
    spy.mockImplementation((_path, _args, _, cb) => {
      // @ts-ignore
      if (cb) cb(null, stdout, null);
    });

    await cloneRepo(url);

    expect(spy).toHaveBeenCalledWith(`${getConfig().rootFolder}/scripts/git.sh`, ['clone', url], {}, expect.any(Function));
    expect(log).toHaveBeenCalledWith(`Clone result ${stdout}`);
    spy.mockRestore();
  });

  it('Should throw and log error if script failed', async () => {
    const url = faker.internet.url();

    const log = jest.spyOn(logger, 'error');
    const spy = jest.spyOn(childProcess, 'execFile');

    const randomWord = faker.random.word();

    // @ts-ignore
    spy.mockImplementation((_path, _args, _, cb) => {
      // @ts-ignore
      if (cb) cb(randomWord, null, null);
    });

    try {
      await cloneRepo(url);
    } catch (e) {
      expect(e).toBe(randomWord);
      expect(log).toHaveBeenCalledWith(`Error cloning repo: ${randomWord}`);
    }
    spy.mockRestore();
  });
});

@@ -1,13 +1,15 @@
 import Logger from '../config/logger/logger';
 import { runScript } from '../modules/fs/fs.helpers';

 export const updateRepo = (repo: string): Promise<void> => {
   return new Promise((resolve, reject) => {
     runScript('/scripts/git.sh', ['update', repo], (err: string, stdout: string) => {
       if (err) {
+        Logger.error(`Error updating repo: ${err}`);
         reject(err);
       }

-      console.info('Update result', stdout);
+      Logger.info(`Update result: ${stdout}`);
+
       resolve();
     });
@@ -18,10 +20,11 @@ export const cloneRepo = (repo: string): Promise<void> => {
   return new Promise((resolve, reject) => {
     runScript('/scripts/git.sh', ['clone', repo], (err: string, stdout: string) => {
       if (err) {
+        Logger.error(`Error cloning repo: ${err}`);
         reject(err);
       }

-      console.info('Clone result', stdout);
+      Logger.info(`Clone result ${stdout}`);
+
       resolve();
     });
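Both helpers wrap the callback-based runScript in a Promise and now log through winston, which is exactly what the new repo-helpers tests assert. A sketch of a caller under those semantics (assumed code, import paths included for completeness):

// Assumed caller, not part of the diff. The promise rejects with the error
// string handed to runScript's callback.
import { cloneRepo, updateRepo } from './helpers/repo-helpers';
import { getConfig } from './core/config/TipiConfig';

const syncAppsRepo = async (): Promise<void> => {
  try {
    await cloneRepo(getConfig().appsRepoUrl);
    await updateRepo(getConfig().appsRepoUrl);
  } catch (err) {
    console.error('Repo sync failed:', err);
  }
};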
@@ -1,7 +1,7 @@
 import { faker } from '@faker-js/faker';
 import { AppCategoriesEnum, AppInfo, AppStatusEnum, FieldTypes } from '../apps.types';
-import config from '../../../config';
 import App from '../app.entity';
+import { getConfig } from '../../../core/config/TipiConfig';

 interface IProps {
   installed?: boolean;
@@ -55,11 +55,11 @@ const createApp = async (props: IProps) => {
   }

   let MockFiles: any = {};
-  MockFiles[`${config.ROOT_FOLDER}/.env`] = 'TEST=test';
-  MockFiles[`${config.ROOT_FOLDER}/repos/repo-id`] = '';
-  MockFiles[`${config.ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/config.json`] = JSON.stringify(appInfo);
-  MockFiles[`${config.ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/docker-compose.yml`] = 'compose';
-  MockFiles[`${config.ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/metadata/description.md`] = 'md desc';
+  MockFiles[`${getConfig().rootFolder}/.env`] = 'TEST=test';
+  MockFiles[`${getConfig().rootFolder}/repos/repo-id`] = '';
+  MockFiles[`${getConfig().rootFolder}/repos/repo-id/apps/${appInfo.id}/config.json`] = JSON.stringify(appInfo);
+  MockFiles[`${getConfig().rootFolder}/repos/repo-id/apps/${appInfo.id}/docker-compose.yml`] = 'compose';
+  MockFiles[`${getConfig().rootFolder}/repos/repo-id/apps/${appInfo.id}/metadata/description.md`] = 'md desc';

   let appEntity = new App();
   if (installed) {
@@ -71,10 +71,10 @@ const createApp = async (props: IProps) => {
       domain,
     }).save();

-    MockFiles[`${config.ROOT_FOLDER}/app-data/${appInfo.id}`] = '';
-    MockFiles[`${config.ROOT_FOLDER}/app-data/${appInfo.id}/app.env`] = 'TEST=test\nAPP_PORT=3000\nTEST_FIELD=test';
-    MockFiles[`${config.ROOT_FOLDER}/apps/${appInfo.id}/config.json`] = JSON.stringify(appInfo);
-    MockFiles[`${config.ROOT_FOLDER}/apps/${appInfo.id}/metadata/description.md`] = 'md desc';
+    MockFiles[`${getConfig().rootFolder}/app-data/${appInfo.id}`] = '';
+    MockFiles[`${getConfig().rootFolder}/app-data/${appInfo.id}/app.env`] = 'TEST=test\nAPP_PORT=3000\nTEST_FIELD=test';
+    MockFiles[`${getConfig().rootFolder}/apps/${appInfo.id}/config.json`] = JSON.stringify(appInfo);
+    MockFiles[`${getConfig().rootFolder}/apps/${appInfo.id}/metadata/description.md`] = 'md desc';
   }

   return { appInfo, MockFiles, appEntity };

@@ -1,7 +1,7 @@
 import { faker } from '@faker-js/faker';
 import fs from 'fs-extra';
 import { DataSource } from 'typeorm';
-import config from '../../../config';
+import { getConfig } from '../../../core/config/TipiConfig';
 import { setupConnection, teardownConnection } from '../../../test/connection';
 import App from '../app.entity';
 import { checkAppRequirements, checkEnvFile, generateEnvFile, getAppInfo, getAvailableApps, getEnvMap, getUpdateInfo, runAppScript } from '../apps.helpers';
@@ -95,7 +95,7 @@ describe('checkEnvFile', () => {

   it('Should throw if a required field is missing', () => {
     const newAppEnv = 'APP_PORT=test\n';
-    fs.writeFileSync(`${config.ROOT_FOLDER}/app-data/${app1.id}/app.env`, newAppEnv);
+    fs.writeFileSync(`${getConfig().rootFolder}/app-data/${app1.id}/app.env`, newAppEnv);

     try {
       checkEnvFile(app1.id);
@@ -167,7 +167,7 @@ describe('generateEnvFile', () => {

     const randomField = faker.random.alphaNumeric(32);

-    fs.writeFileSync(`${config.ROOT_FOLDER}/app-data/${appInfo.id}/app.env`, `RANDOM_FIELD=${randomField}`);
+    fs.writeFileSync(`${getConfig().rootFolder}/app-data/${appInfo.id}/app.env`, `RANDOM_FIELD=${randomField}`);

     generateEnvFile(appEntity);

@@ -271,7 +271,7 @@ describe('getAppInfo', () => {
     // @ts-ignore
     fs.__createMockFiles(MockFiles);

-    fs.writeFileSync(`${config.ROOT_FOLDER}/repos/repo-id/apps/${app1.id}/config.json`, '{}');
+    fs.writeFileSync(`${getConfig().rootFolder}/repos/repo-id/apps/${app1.id}/config.json`, '{}');

     const app = await getAppInfo(appInfo.id);

@@ -1,6 +1,5 @@
 import AppsService from '../apps.service';
 import fs from 'fs-extra';
-import config from '../../../config';
 import childProcess from 'child_process';
 import { AppInfo, AppStatusEnum } from '../apps.types';
 import App from '../app.entity';
@@ -8,6 +7,7 @@ import { createApp } from './apps.factory';
 import { setupConnection, teardownConnection } from '../../../test/connection';
 import { DataSource } from 'typeorm';
 import { getEnvMap } from '../apps.helpers';
+import { getConfig } from '../../../core/config/TipiConfig';

 jest.mock('fs-extra');
 jest.mock('child_process');
@@ -43,7 +43,7 @@ describe('Install app', () => {

   it('Should correctly generate env file for app', async () => {
     await AppsService.installApp(app1.id, { TEST_FIELD: 'test' });
-    const envFile = fs.readFileSync(`${config.ROOT_FOLDER}/app-data/${app1.id}/app.env`).toString();
+    const envFile = fs.readFileSync(`${getConfig().rootFolder}/app-data/${app1.id}/app.env`).toString();

     expect(envFile.trim()).toBe(`TEST=test\nAPP_PORT=${app1.port}\nTEST_FIELD=test\nAPP_DOMAIN=192.168.1.10:${app1.port}`);
   });
@@ -63,7 +63,7 @@ describe('Install app', () => {
     const spy = jest.spyOn(childProcess, 'execFile');
     await AppsService.installApp(app1.id, { TEST_FIELD: 'test' });

-    expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['install', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
+    expect(spy.mock.lastCall).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['install', app1.id], {}, expect.any(Function)]);
     spy.mockRestore();
   });

@@ -74,8 +74,8 @@ describe('Install app', () => {
     await AppsService.installApp(app1.id, { TEST_FIELD: 'test' });

     expect(spy.mock.calls.length).toBe(2);
-    expect(spy.mock.calls[0]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['install', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
-    expect(spy.mock.calls[1]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
+    expect(spy.mock.calls[0]).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['install', app1.id], {}, expect.any(Function)]);
+    expect(spy.mock.calls[1]).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['start', app1.id], {}, expect.any(Function)]);

     spy.mockRestore();
   });
@@ -112,7 +112,7 @@ describe('Install app', () => {

   it('Should correctly copy app from repos to apps folder', async () => {
     await AppsService.installApp(app1.id, { TEST_FIELD: 'test' });
-    const appFolder = fs.readdirSync(`${config.ROOT_FOLDER}/apps/${app1.id}`);
+    const appFolder = fs.readdirSync(`${getConfig().rootFolder}/apps/${app1.id}`);

     expect(appFolder).toBeDefined();
     expect(appFolder.indexOf('docker-compose.yml')).toBeGreaterThanOrEqual(0);
@@ -121,19 +121,19 @@ describe('Install app', () => {
   it('Should cleanup any app folder existing before install', async () => {
     const { MockFiles, appInfo } = await createApp({});
     app1 = appInfo;
-    MockFiles[`/tipi/apps/${appInfo.id}/docker-compose.yml`] = 'test';
-    MockFiles[`/tipi/apps/${appInfo.id}/test.yml`] = 'test';
-    MockFiles[`/tipi/apps/${appInfo.id}`] = ['test.yml', 'docker-compose.yml'];
+    MockFiles[`${getConfig().rootFolder}/apps/${appInfo.id}/docker-compose.yml`] = 'test';
+    MockFiles[`${getConfig().rootFolder}/apps/${appInfo.id}/test.yml`] = 'test';
+    MockFiles[`${getConfig().rootFolder}/apps/${appInfo.id}`] = ['test.yml', 'docker-compose.yml'];

     // @ts-ignore
     fs.__createMockFiles(MockFiles);

-    expect(fs.existsSync(`${config.ROOT_FOLDER}/apps/${app1.id}/test.yml`)).toBe(true);
+    expect(fs.existsSync(`${getConfig().rootFolder}/apps/${app1.id}/test.yml`)).toBe(true);

     await AppsService.installApp(app1.id, { TEST_FIELD: 'test' });

-    expect(fs.existsSync(`${config.ROOT_FOLDER}/apps/${app1.id}/test.yml`)).toBe(false);
-    expect(fs.existsSync(`${config.ROOT_FOLDER}/apps/${app1.id}/docker-compose.yml`)).toBe(true);
+    expect(fs.existsSync(`${getConfig().rootFolder}/apps/${app1.id}/test.yml`)).toBe(false);
+    expect(fs.existsSync(`${getConfig().rootFolder}/apps/${app1.id}/docker-compose.yml`)).toBe(true);
   });

   it('Should throw if app is exposed and domain is not provided', async () => {
@@ -194,7 +194,7 @@ describe('Uninstall app', () => {

     await AppsService.uninstallApp(app1.id);

-    expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['uninstall', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
+    expect(spy.mock.lastCall).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['uninstall', app1.id], {}, expect.any(Function)]);

     spy.mockRestore();
   });
@@ -205,8 +205,8 @@ describe('Uninstall app', () => {
     await AppsService.uninstallApp(app1.id);

     expect(spy.mock.calls.length).toBe(2);
-    expect(spy.mock.calls[0]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['stop', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
-    expect(spy.mock.calls[1]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['uninstall', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
+    expect(spy.mock.calls[0]).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['stop', app1.id], {}, expect.any(Function)]);
+    expect(spy.mock.calls[1]).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['uninstall', app1.id], {}, expect.any(Function)]);

     spy.mockRestore();
   });
@@ -245,7 +245,7 @@ describe('Start app', () => {

     await AppsService.startApp(app1.id);

-    expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
+    expect(spy.mock.lastCall).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['start', app1.id], {}, expect.any(Function)]);

     spy.mockRestore();
   });
@@ -266,11 +266,11 @@ describe('Start app', () => {
   });

   it('Regenerate env file', async () => {
-    fs.writeFile(`${config.ROOT_FOLDER}/app-data/${app1.id}/app.env`, 'TEST=test\nAPP_PORT=3000', () => {});
+    fs.writeFile(`${getConfig().rootFolder}/app-data/${app1.id}/app.env`, 'TEST=test\nAPP_PORT=3000', () => {});

     await AppsService.startApp(app1.id);

-    const envFile = fs.readFileSync(`${config.ROOT_FOLDER}/app-data/${app1.id}/app.env`).toString();
+    const envFile = fs.readFileSync(`${getConfig().rootFolder}/app-data/${app1.id}/app.env`).toString();

     expect(envFile.trim()).toBe(`TEST=test\nAPP_PORT=${app1.port}\nTEST_FIELD=test\nAPP_DOMAIN=192.168.1.10:${app1.port}`);
   });
@@ -302,7 +302,7 @@ describe('Stop app', () => {

     await AppsService.stopApp(app1.id);

-    expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['stop', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)]);
+    expect(spy.mock.lastCall).toEqual([`${getConfig().rootFolder}/scripts/app.sh`, ['stop', app1.id], {}, expect.any(Function)]);
   });

   it('Should throw if app is not installed', async () => {
@@ -334,7 +334,7 @@ describe('Update app config', () => {
   it('Should correctly update app config', async () => {
     await AppsService.updateAppConfig(app1.id, { TEST_FIELD: 'test' });

-    const envFile = fs.readFileSync(`${config.ROOT_FOLDER}/app-data/${app1.id}/app.env`).toString();
+    const envFile = fs.readFileSync(`${getConfig().rootFolder}/app-data/${app1.id}/app.env`).toString();

     expect(envFile.trim()).toBe(`TEST=test\nAPP_PORT=${app1.port}\nTEST_FIELD=test\nAPP_DOMAIN=192.168.1.10:${app1.port}`);
   });
@@ -352,8 +352,8 @@ describe('Update app config', () => {
     // @ts-ignore
     fs.__createMockFiles(MockFiles);

-    const envFile = fs.readFileSync(`${config.ROOT_FOLDER}/app-data/${appInfo.id}/app.env`).toString();
-    fs.writeFileSync(`${config.ROOT_FOLDER}/app-data/${appInfo.id}/app.env`, `${envFile}\nRANDOM_FIELD=test`);
+    const envFile = fs.readFileSync(`${getConfig().rootFolder}/app-data/${appInfo.id}/app.env`).toString();
+    fs.writeFileSync(`${getConfig().rootFolder}/app-data/${appInfo.id}/app.env`, `${envFile}\nRANDOM_FIELD=test`);

     await AppsService.updateAppConfig(appInfo.id, { TEST_FIELD: 'test' });

@@ -470,8 +470,8 @@ describe('Start all apps', () => {

     expect(spy.mock.calls.length).toBe(2);
     expect(spy.mock.calls).toEqual([
-      [`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', app1.id, '/tipi', 'repo-id'], {}, expect.any(Function)],
-      [`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', app2.id, '/tipi', 'repo-id'], {}, expect.any(Function)],
+      [`${getConfig().rootFolder}/scripts/app.sh`, ['start', app1.id], {}, expect.any(Function)],
+      [`${getConfig().rootFolder}/scripts/app.sh`, ['start', app2.id], {}, expect.any(Function)],
     ]);
   });

@@ -2,15 +2,17 @@ import portUsed from 'tcp-port-used';
 import { fileExists, getSeed, readdirSync, readFile, readJsonFile, runScript, writeFile } from '../fs/fs.helpers';
 import InternalIp from 'internal-ip';
 import crypto from 'crypto';
-import config from '../../config';
 import { AppInfo, AppStatusEnum } from './apps.types';
 import logger from '../../config/logger/logger';
 import App from './app.entity';
+import { getConfig } from '../../core/config/TipiConfig';
+
+const { appsRepoId, internalIp } = getConfig();

 export const checkAppRequirements = async (appName: string) => {
   let valid = true;

-  const configFile: AppInfo | null = readJsonFile(`/repos/${config.APPS_REPO_ID}/apps/${appName}/config.json`);
+  const configFile: AppInfo | null = readJsonFile(`/repos/${appsRepoId}/apps/${appName}/config.json`);

   if (!configFile) {
     throw new Error(`App ${appName} not found`);
@@ -57,7 +59,7 @@ export const checkEnvFile = (appName: string) => {

 export const runAppScript = async (params: string[]): Promise<void> => {
   return new Promise((resolve, reject) => {
-    runScript('/scripts/app.sh', [...params, config.ROOT_FOLDER_HOST, config.APPS_REPO_ID], (err: string) => {
+    runScript('/scripts/app.sh', [...params], (err: string) => {
       if (err) {
         logger.error(err);
         reject(err);
@@ -110,7 +112,7 @@ export const generateEnvFile = (app: App) => {
     envFile += `APP_DOMAIN=${app.domain}\n`;
     envFile += 'APP_PROTOCOL=https\n';
   } else {
-    envFile += `APP_DOMAIN=${config.INTERNAL_IP}:${configFile.port}\n`;
+    envFile += `APP_DOMAIN=${internalIp}:${configFile.port}\n`;
   }

   writeFile(`/app-data/${app.id}/app.env`, envFile);
@@ -119,11 +121,11 @@ export const getAvailableApps = async (): Promise<string[]> => {
   const apps: string[] = [];

-  const appsDir = readdirSync(`/repos/${config.APPS_REPO_ID}/apps`);
+  const appsDir = readdirSync(`/repos/${appsRepoId}/apps`);

   appsDir.forEach((app) => {
-    if (fileExists(`/repos/${config.APPS_REPO_ID}/apps/${app}/config.json`)) {
-      const configFile: AppInfo = readJsonFile(`/repos/${config.APPS_REPO_ID}/apps/${app}/config.json`);
+    if (fileExists(`/repos/${appsRepoId}/apps/${app}/config.json`)) {
+      const configFile: AppInfo = readJsonFile(`/repos/${appsRepoId}/apps/${app}/config.json`);

       if (configFile.available) {
         apps.push(app);
@@ -136,8 +138,6 @@ export const getAvailableApps = async (): Promise<string[]> => {

 export const getAppInfo = (id: string, status?: AppStatusEnum): AppInfo | null => {
   try {
-    const repoId = config.APPS_REPO_ID;
-
     // Check if app is installed
     const installed = typeof status !== 'undefined' && status !== AppStatusEnum.MISSING;

@@ -145,9 +145,9 @@ export const getAppInfo = (id: string, status?: AppStatusEnum): AppInfo | null =
       const configFile: AppInfo = readJsonFile(`/apps/${id}/config.json`);
       configFile.description = readFile(`/apps/${id}/metadata/description.md`).toString();
       return configFile;
-    } else if (fileExists(`/repos/${repoId}/apps/${id}/config.json`)) {
-      const configFile: AppInfo = readJsonFile(`/repos/${repoId}/apps/${id}/config.json`);
-      configFile.description = readFile(`/repos/${repoId}/apps/${id}/metadata/description.md`);
+    } else if (fileExists(`/repos/${appsRepoId}/apps/${id}/config.json`)) {
+      const configFile: AppInfo = readJsonFile(`/repos/${appsRepoId}/apps/${id}/config.json`);
+      configFile.description = readFile(`/repos/${appsRepoId}/apps/${id}/metadata/description.md`);

       if (configFile.available) {
         return configFile;
@@ -164,13 +164,13 @@ export const getAppInfo = (id: string, status?: AppStatusEnum): AppInfo | null =
 export const getUpdateInfo = async (id: string) => {
   const app = await App.findOne({ where: { id } });

-  const doesFileExist = fileExists(`/repos/${config.APPS_REPO_ID}/apps/${id}`);
+  const doesFileExist = fileExists(`/repos/${appsRepoId}/apps/${id}`);

   if (!app || !doesFileExist) {
     return null;
   }

-  const repoConfig: AppInfo = readJsonFile(`/repos/${config.APPS_REPO_ID}/apps/${id}/config.json`);
+  const repoConfig: AppInfo = readJsonFile(`/repos/${appsRepoId}/apps/${id}/config.json`);

   return {
     current: app.version,
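One behavioural detail worth noting: `const { appsRepoId, internalIp } = getConfig();` copies the values once, when this module is first imported, so a later setConfig() or applyJsonConfig() is not reflected here, while call sites that invoke getConfig() inline always see the current singleton. Illustrative contrast (not code from the diff):

// Illustrative contrast, not code from the diff.
import { getConfig, setConfig } from '../../core/config/TipiConfig';

const { appsRepoId } = getConfig(); // snapshot taken at module load

setConfig('appsRepoId', 'new-repo-id');

console.log(appsRepoId);             // still the old id (a plain copied string)
console.log(getConfig().appsRepoId); // "new-repo-id" (reads the singleton again)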
|
@ -4,8 +4,8 @@ import { checkAppRequirements, checkEnvFile, generateEnvFile, getAvailableApps,
|
|||
import { AppInfo, AppStatusEnum, ListAppsResonse } from './apps.types';
|
||||
import App from './app.entity';
|
||||
import logger from '../../config/logger/logger';
|
||||
import config from '../../config';
|
||||
import { Not } from 'typeorm';
|
||||
import { getConfig } from '../../core/config/TipiConfig';
|
||||
|
||||
const sortApps = (a: AppInfo, b: AppInfo) => a.name.localeCompare(b.name);
|
||||
|
||||
|
@ -124,7 +124,7 @@ const listApps = async (): Promise<ListAppsResonse> => {
|
|||
const apps: AppInfo[] = folders
|
||||
.map((app) => {
|
||||
try {
|
||||
return readJsonFile(`/repos/${config.APPS_REPO_ID}/apps/${app}/config.json`);
|
||||
return readJsonFile(`/repos/${getConfig().appsRepoId}/apps/${app}/config.json`);
|
||||
} catch (e) {
|
||||
return null;
|
||||
}
|
||||
|
@ -132,7 +132,7 @@ const listApps = async (): Promise<ListAppsResonse> => {
|
|||
.filter(Boolean);
|
||||
|
||||
apps.forEach((app) => {
|
||||
app.description = readFile(`/repos/${config.APPS_REPO_ID}/apps/${app.id}/metadata/description.md`);
|
||||
app.description = readFile(`/repos/${getConfig().appsRepoId}/apps/${app.id}/metadata/description.md`);
|
||||
});
|
||||
|
||||
return { apps: apps.sort(sortApps), total: apps.length };
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
import childProcess from 'child_process';
|
||||
import config from '../../../config';
|
||||
import { getAbsolutePath, readJsonFile, readFile, readdirSync, fileExists, writeFile, createFolder, deleteFolder, runScript, getSeed, ensureAppFolder } from '../fs.helpers';
|
||||
import fs from 'fs-extra';
|
||||
import { getConfig } from '../../../core/config/TipiConfig';
|
||||
import { faker } from '@faker-js/faker';
|
||||
|
||||
jest.mock('fs-extra');
|
||||
|
||||
|
@ -12,7 +13,7 @@ beforeEach(() => {
|
|||
|
||||
describe('Test: getAbsolutePath', () => {
|
||||
it('should return the absolute path', () => {
|
||||
expect(getAbsolutePath('/test')).toBe(`${config.ROOT_FOLDER}/test`);
|
||||
expect(getAbsolutePath('/test')).toBe(`${getConfig().rootFolder}/test`);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -21,7 +22,7 @@ describe('Test: readJsonFile', () => {
|
|||
// Arrange
|
||||
const rawFile = '{"test": "test"}';
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/test-file.json`]: rawFile,
|
||||
[`${getConfig().rootFolder}/test-file.json`]: rawFile,
|
||||
};
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
@ -36,13 +37,29 @@ describe('Test: readJsonFile', () => {
|
|||
it('should return null if the file does not exist', () => {
|
||||
expect(readJsonFile('/test')).toBeNull();
|
||||
});
|
||||
|
||||
it('Should return null if fs.readFile throws an error', () => {
|
||||
// Arrange
|
||||
// @ts-ignore
|
||||
const spy = jest.spyOn(fs, 'readFileSync');
|
||||
spy.mockImplementation(() => {
|
||||
throw new Error('Error');
|
||||
});
|
||||
|
||||
// Act
|
||||
const file = readJsonFile('/test');
|
||||
|
||||
// Assert
|
||||
expect(file).toBeNull();
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: readFile', () => {
|
||||
it('should return the file', () => {
|
||||
const rawFile = 'test';
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/test-file.txt`]: rawFile,
|
||||
[`${getConfig().rootFolder}/test-file.txt`]: rawFile,
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
|
@ -59,7 +76,7 @@ describe('Test: readFile', () => {
|
|||
describe('Test: readdirSync', () => {
|
||||
it('should return the files', () => {
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/test/test-file.txt`]: 'test',
|
||||
[`${getConfig().rootFolder}/test/test-file.txt`]: 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
|
@ -76,7 +93,7 @@ describe('Test: readdirSync', () => {
|
|||
describe('Test: fileExists', () => {
|
||||
it('should return true if the file exists', () => {
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/test-file.txt`]: 'test',
|
||||
[`${getConfig().rootFolder}/test-file.txt`]: 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
|
@ -96,7 +113,7 @@ describe('Test: writeFile', () => {
|
|||
|
||||
writeFile('/test-file.txt', 'test');
|
||||
|
||||
expect(spy).toHaveBeenCalledWith(`${config.ROOT_FOLDER}/test-file.txt`, 'test');
|
||||
expect(spy).toHaveBeenCalledWith(`${getConfig().rootFolder}/test-file.txt`, 'test');
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -106,7 +123,7 @@ describe('Test: createFolder', () => {
|
|||
|
||||
createFolder('/test');
|
||||
|
||||
expect(spy).toHaveBeenCalledWith(`${config.ROOT_FOLDER}/test`);
|
||||
expect(spy).toHaveBeenCalledWith(`${getConfig().rootFolder}/test`);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -116,7 +133,7 @@ describe('Test: deleteFolder', () => {
|
|||
|
||||
deleteFolder('/test');
|
||||
|
||||
expect(spy).toHaveBeenCalledWith(`${config.ROOT_FOLDER}/test`, { recursive: true });
|
||||
expect(spy).toHaveBeenCalledWith(`${getConfig().rootFolder}/test`, { recursive: true });
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -127,14 +144,14 @@ describe('Test: runScript', () => {
|
|||
|
||||
runScript('/test', [], callback);
|
||||
|
||||
expect(spy).toHaveBeenCalledWith(`${config.ROOT_FOLDER}/test`, [], {}, callback);
|
||||
expect(spy).toHaveBeenCalledWith(`${getConfig().rootFolder}/test`, [], {}, callback);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Test: getSeed', () => {
|
||||
it('should return the seed', () => {
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/state/seed`]: 'test',
|
||||
[`${getConfig().rootFolder}/state/seed`]: 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
|
@ -147,7 +164,7 @@ describe('Test: getSeed', () => {
|
|||
describe('Test: ensureAppFolder', () => {
|
||||
beforeEach(() => {
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/repos/${config.APPS_REPO_ID}/apps/test`]: ['test.yml'],
|
||||
[`${getConfig().rootFolder}/repos/${getConfig().appsRepoId}/apps/test`]: ['test.yml'],
|
||||
};
|
||||
// @ts-ignore
|
||||
fs.__createMockFiles(mockFiles);
|
||||
|
@ -158,15 +175,15 @@ describe('Test: ensureAppFolder', () => {
|
|||
ensureAppFolder('test');
|
||||
|
||||
// Assert
|
||||
const files = fs.readdirSync(`${config.ROOT_FOLDER}/apps/test`);
|
||||
const files = fs.readdirSync(`${getConfig().rootFolder}/apps/test`);
|
||||
expect(files).toEqual(['test.yml']);
|
||||
});
|
||||
|
||||
it('should not copy the folder if it already exists', () => {
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/repos/${config.APPS_REPO_ID}/apps/test`]: ['test.yml'],
|
||||
[`${config.ROOT_FOLDER}/apps/test`]: ['docker-compose.yml'],
|
||||
[`${config.ROOT_FOLDER}/apps/test/docker-compose.yml`]: 'test',
|
||||
[`${getConfig().rootFolder}/repos/${getConfig().appsRepoId}/apps/test`]: ['test.yml'],
|
||||
[`${getConfig().rootFolder}/apps/test`]: ['docker-compose.yml'],
|
||||
[`${getConfig().rootFolder}/apps/test/docker-compose.yml`]: 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
|
@ -176,15 +193,15 @@ describe('Test: ensureAppFolder', () => {
|
|||
ensureAppFolder('test');
|
||||
|
||||
// Assert
|
||||
const files = fs.readdirSync(`${config.ROOT_FOLDER}/apps/test`);
|
||||
const files = fs.readdirSync(`${getConfig().rootFolder}/apps/test`);
|
||||
expect(files).toEqual(['docker-compose.yml']);
|
||||
});
|
||||
|
||||
it('Should overwrite the folder if clean up is true', () => {
|
||||
const mockFiles = {
|
||||
[`${config.ROOT_FOLDER}/repos/${config.APPS_REPO_ID}/apps/test`]: ['test.yml'],
|
||||
[`${config.ROOT_FOLDER}/apps/test`]: ['docker-compose.yml'],
|
||||
[`${config.ROOT_FOLDER}/apps/test/docker-compose.yml`]: 'test',
|
||||
[`${getConfig().rootFolder}/repos/${getConfig().appsRepoId}/apps/test`]: ['test.yml'],
|
||||
[`${getConfig().rootFolder}/apps/test`]: ['docker-compose.yml'],
|
||||
[`${getConfig().rootFolder}/apps/test/docker-compose.yml`]: 'test',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
|
@@ -194,7 +211,26 @@
    ensureAppFolder('test', true);

    // Assert
-    const files = fs.readdirSync(`${config.ROOT_FOLDER}/apps/test`);
+    const files = fs.readdirSync(`${getConfig().rootFolder}/apps/test`);
    expect(files).toEqual(['test.yml']);
  });

+  it('Should delete folder if it exists but has no docker-compose.yml file', () => {
+    // Arrange
+    const randomFileName = `${faker.random.word()}.yml`;
+    const mockFiles = {
+      [`${getConfig().rootFolder}/repos/${getConfig().appsRepoId}/apps/test`]: [randomFileName],
+      [`${getConfig().rootFolder}/apps/test`]: ['test.yml'],
+    };
+
+    // @ts-ignore
+    fs.__createMockFiles(mockFiles);
+
+    // Act
+    ensureAppFolder('test');
+
+    // Assert
+    const files = fs.readdirSync(`${getConfig().rootFolder}/apps/test`);
+    expect(files).toEqual([randomFileName]);
+  });
});

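The hunks above switch the tests from the old static config object to the new runtime accessor. The TipiConfig implementation itself is not part of this excerpt; the sketch below shows what a getConfig()-style accessor built on zod can look like. The field names (rootFolder, appsRepoId, appsRepoUrl, version) come from the call sites above; the schema, the env-var mapping and all other names are assumptions.

import { z } from 'zod';

// Sketch only: the real TipiConfig lives in core/config/TipiConfig and is not shown here.
const configSchema = z.object({
  rootFolder: z.string(),
  appsRepoId: z.string(),
  appsRepoUrl: z.string(),
  version: z.string(),
});

type Config = z.infer<typeof configSchema>;

// Values read once from the environment (variable names as used by the compose files).
const config: Config = configSchema.parse({
  rootFolder: process.env.ROOT_FOLDER,
  appsRepoId: process.env.APPS_REPO_ID,
  appsRepoUrl: process.env.APPS_REPO_URL,
  version: process.env.TIPI_VERSION,
});

// Tests and application code read the runtime config through this accessor
// instead of importing a frozen config object at module load time.
export const getConfig = () => config;
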
@@ -1,17 +1,21 @@
import fs from 'fs-extra';
import childProcess from 'child_process';
-import config from '../../config';
+import { getConfig } from '../../core/config/TipiConfig';

-export const getAbsolutePath = (path: string) => `${config.ROOT_FOLDER}${path}`;
+export const getAbsolutePath = (path: string) => `${getConfig().rootFolder}${path}`;

export const readJsonFile = (path: string): any => {
-  const rawFile = fs.readFileSync(getAbsolutePath(path))?.toString();
+  try {
+    const rawFile = fs.readFileSync(getAbsolutePath(path))?.toString();

-  if (!rawFile) {
+    if (!rawFile) {
      return null;
    }

+    return JSON.parse(rawFile);
+  } catch (e) {
+    return null;
+  }

-  return JSON.parse(rawFile);
};

export const readFile = (path: string): string => {

@@ -50,6 +54,6 @@ export const ensureAppFolder = (appName: string, cleanup = false) => {
  if (!fileExists(`/apps/${appName}/docker-compose.yml`)) {
    if (fileExists(`/apps/${appName}`)) deleteFolder(`/apps/${appName}`);
    // Copy from apps repo
-    fs.copySync(getAbsolutePath(`/repos/${config.APPS_REPO_ID}/apps/${appName}`), getAbsolutePath(`/apps/${appName}`));
+    fs.copySync(getAbsolutePath(`/repos/${getConfig().appsRepoId}/apps/${appName}`), getAbsolutePath(`/apps/${appName}`));
  }
};

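With readJsonFile wrapped in a try/catch, a missing or malformed file now yields null instead of an exception. A small jest-style illustration using the mocked fs from the tests above (the file name and content here are hypothetical):

// @ts-ignore — hypothetical mock, mirroring the pattern used in the tests above
fs.__createMockFiles({ [`${getConfig().rootFolder}/state/broken.json`]: '{ not json' });

// Previously this call threw a SyntaxError from JSON.parse; now it returns null.
expect(readJsonFile('/state/broken.json')).toBeNull();
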
@@ -1,6 +1,6 @@
import axios from 'axios';
-import config from '../../config';
import TipiCache from '../../config/TipiCache';
+import { getConfig } from '../../core/config/TipiConfig';
import { readJsonFile } from '../fs/fs.helpers';

type SystemInfo = {

@@ -38,9 +38,9 @@ const getVersion = async (): Promise<{ current: string; latest?: string }> => {
    TipiCache.set('latestVersion', version?.replace('v', ''));

-    return { current: config.VERSION, latest: version?.replace('v', '') };
+    return { current: getConfig().version, latest: version?.replace('v', '') };
  } catch (e) {
-    return { current: config.VERSION, latest: undefined };
+    return { current: getConfig().version, latest: undefined };
  }
};

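getVersion now reads the running version from getConfig().version and caches the latest published tag in TipiCache. A hedged sketch of a consumer that turns this into an "update available" flag (plain string inequality, for illustration only, not the project's actual logic):

import { getVersion } from './system.helpers'; // import path assumed

const isUpdateAvailable = async (): Promise<boolean> => {
  const { current, latest } = await getVersion();
  // Naive comparison: any published version different from the running one counts as an update.
  return Boolean(latest) && latest !== current;
};
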
@@ -1,7 +1,6 @@
import 'reflect-metadata';
import express from 'express';
import { ApolloServerPluginLandingPageGraphQLPlayground as Playground } from 'apollo-server-core';
-import config from './config';
import { ApolloServer } from 'apollo-server-express';
import { createSchema } from './schema';
import { ApolloLogs } from './config/logger/apollo.logger';

@@ -17,6 +16,8 @@ import { runUpdates } from './core/updates/run';
import recover from './core/updates/recover-migrations';
import { cloneRepo, updateRepo } from './helpers/repo-helpers';
import startJobs from './core/jobs/jobs';
+import { applyJsonConfig, getConfig } from './core/config/TipiConfig';
+import { ZodError } from 'zod';

let corsOptions = {
  credentials: true,

@@ -27,7 +28,7 @@ let corsOptions = {
    // disallow requests with no origin
    if (!origin) return callback(new Error('Not allowed by CORS'), false);

-    if (config.CLIENT_URLS.includes(origin)) {
+    if (getConfig().clientUrls.includes(origin)) {
      return callback(null, true);
    }

@@ -36,12 +37,27 @@ let corsOptions = {
  },
};

+const applyCustomConfig = () => {
+  try {
+    applyJsonConfig();
+  } catch (e) {
+    logger.error('Error applying settings.json config');
+    if (e instanceof ZodError) {
+      Object.keys(e.flatten().fieldErrors).forEach((key) => {
+        logger.error(`Error in field ${key}`);
+      });
+    }
+  }
+};
+
const main = async () => {
  try {
+    applyCustomConfig();

    const app = express();
    const port = 3001;

-    app.use(express.static(`${config.ROOT_FOLDER}/repos/${config.APPS_REPO_ID}`));
+    app.use(express.static(`${getConfig().rootFolder}/repos/${getConfig().appsRepoId}`));
    app.use(cors(corsOptions));
    app.use(getSessionMiddleware());

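applyJsonConfig, imported from TipiConfig above, is what turns state/settings.json into a runtime config source; its implementation is not part of this excerpt. A minimal sketch of the idea, combining the readJsonFile helper with a partial zod schema (field names taken from the settings.json overrides in the scripts/start.sh hunk further down; paths, names and details are assumptions):

import { z } from 'zod';
import { readJsonFile } from '../../modules/fs/fs.helpers'; // path assumed

const settingsSchema = z
  .object({ dnsIp: z.string(), domain: z.string(), appsRepoUrl: z.string(), appsRepoId: z.string() })
  .partial();

// Sketch only: the real applyJsonConfig lives in core/config/TipiConfig.
export const applyJsonConfigSketch = () => {
  const settings = readJsonFile('/state/settings.json'); // null when missing or malformed

  if (!settings) return;

  // parse() throws a ZodError on invalid fields, which index.ts logs field by field.
  const parsed = settingsSchema.parse(settings);

  // ...merge `parsed` into the in-memory config here.
};
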
@@ -68,22 +84,21 @@ const main = async () => {
      await datasource.runMigrations();
    } catch (e) {
      logger.error(e);
-      await recover();
+      await recover(datasource);
    }

    // Run migrations
    await runUpdates();

    httpServer.listen(port, async () => {
-      await cloneRepo(config.APPS_REPO_URL);
-      await updateRepo(config.APPS_REPO_URL);
+      await cloneRepo(getConfig().appsRepoUrl);
+      await updateRepo(getConfig().appsRepoUrl);
      startJobs();
      // Start apps
      appsService.startAllApps();
-      console.info(`Server running on port ${port} 🚀 Production => ${__prod__}`);
+      logger.info(`Server running on port ${port} 🚀 Production => ${__prod__}`);
    });
  } catch (error) {
-    console.log(error);
+    logger.error(error);
  }
};

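The per-field logging in applyCustomConfig relies on ZodError.flatten(), which groups validation issues by offending key. For illustration (standalone example, not code from the PR):

import { z, ZodError } from 'zod';

const schema = z.object({ domain: z.string(), dnsIp: z.string() });

try {
  schema.parse({ domain: 42 }); // wrong type for domain, dnsIp missing
} catch (e) {
  if (e instanceof ZodError) {
    // Logs ['domain', 'dnsIp'] — one entry per invalid field, as in the catch block above.
    console.log(Object.keys(e.flatten().fieldErrors));
  }
}
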
@@ -38,7 +38,8 @@ export const setupConnection = async (testsuite: string): Promise<DataSource> =>
    entities: [App, User, Update],
  });

-  return AppDataSource.initialize();
+  await AppDataSource.initialize();
+  return AppDataSource;
};

export const teardownConnection = async (testsuite: string): Promise<void> => {

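setupConnection now resolves to the initialized DataSource itself, so a test suite can keep a handle on it for cleanup. A typical jest wiring might look like this (import path and suite name are placeholders, not taken from the PR):

import { DataSource } from 'typeorm';
import { setupConnection, teardownConnection } from '../../test/connection'; // path assumed

let db: DataSource | null = null;
const TEST_SUITE = 'example-suite';

beforeAll(async () => {
  db = await setupConnection(TEST_SUITE);
});

afterAll(async () => {
  await db?.destroy();
  await teardownConnection(TEST_SUITE);
});
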
6  pnpm-lock.yaml (generated)
@@ -193,6 +193,7 @@ importers:
      typescript: 4.6.4
      validator: ^13.7.0
      winston: ^3.7.2
+     zod: ^3.19.1
    dependencies:
      apollo-server-core: 3.10.0_graphql@15.8.0
      apollo-server-express: 3.9.0_jfj6k5cqxqbusbdzwqjdzioxzm

@@ -226,6 +227,7 @@ importers:
      typeorm: 0.3.6_pg@8.7.3+ts-node@10.8.2
      validator: 13.7.0
      winston: 3.7.2
+     zod: 3.19.1
    devDependencies:
      '@faker-js/faker': 7.3.0
      '@swc/cli': 0.1.57_@swc+core@1.2.210

@@ -12920,6 +12922,10 @@ packages:
    resolution: {integrity: sha512-PQ2PC7R9rslx84ndNBZB/Dkv8V8fZEpk83RLgXtYd0fwUgEjseMn1Dgajh2x6S8QbZAFa9p2qVCEuYZNgve0dQ==}
    dev: false

+  /zod/3.19.1:
+    resolution: {integrity: sha512-LYjZsEDhCdYET9ikFu6dVPGp2YH9DegXjdJToSzD9rO6fy4qiRYFoyEYwps88OseJlPyl2NOe2iJuhEhL7IpEA==}
+    dev: false
+
  /zustand/3.7.2_react@18.1.0:
    resolution: {integrity: sha512-PIJDIZKtokhof+9+60cpockVOq05sJzHCriyvaLBmEJixseQ1a5Kdov6fWZfWOu5SK9c+FhH1jU0tntLxRJYMA==}
    engines: {node: '>=12.7.0'}

@@ -4,46 +4,28 @@
set -euo pipefail

-# use greadlink instead of readlink on osx
-if [[ "$(uname)" == "Darwin" ]]; then
-  rdlk=greadlink
-else
-  rdlk=readlink
+cd /runtipi || echo ""
+# Ensure PWD ends with /runtipi
+if [[ $(basename "$(pwd)") != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
+  echo "Please run this script from the runtipi directory"
+  exit 1
fi

-ROOT_FOLDER="$($rdlk -f $(dirname "${BASH_SOURCE[0]}")/..)"
-REPO_ID="$(echo -n "https://github.com/meienberger/runtipi-appstore" | sha256sum | awk '{print $1}')"
-STATE_FOLDER="${ROOT_FOLDER}/state"
+# Root folder in container is /runtipi
+ROOT_FOLDER="${PWD}"

-show_help() {
-  cat <<EOF
-app 0.0.1
+ENV_FILE="${ROOT_FOLDER}/.env"

-CLI for managing Tipi apps
-
-Usage: app <command> <app> [<arguments>]
-
-Commands:
-    install        Pulls down images for an app and starts it
-    uninstall      Removes images and destroys all data for an app
-    stop           Stops an installed app
-    start          Starts an installed app
-    compose        Passes all arguments to Docker Compose
-    ls-installed   Lists installed apps
-EOF
-}
+# Root folder in host system
+ROOT_FOLDER_HOST=$(grep -v '^#' "${ENV_FILE}" | xargs -n 1 | grep ROOT_FOLDER_HOST | cut -d '=' -f2)
+REPO_ID=$(grep -v '^#' "${ENV_FILE}" | xargs -n 1 | grep APPS_REPO_ID | cut -d '=' -f2)

# Get field from json file
function get_json_field() {
  local json_file="$1"
  local field="$2"

-  echo $(jq -r ".${field}" "${json_file}")
-}
-
-list_installed_apps() {
-  str=$(get_json_field ${STATE_FOLDER}/apps.json installed)
-  echo $str
+  jq -r ".${field}" "${json_file}"
}

if [ -z ${1+x} ]; then

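The app CLI above now derives ROOT_FOLDER_HOST and APPS_REPO_ID from the generated .env file instead of taking them as positional arguments. For reference, a Node-side equivalent of that grep/cut pipeline could look like this (a sketch, not part of the PR; helper name and behaviour are hypothetical):

import fs from 'fs';

// Read a single KEY=value entry from the runtime .env file, ignoring comment lines.
const getEnvValue = (envFilePath: string, key: string): string | undefined =>
  fs
    .readFileSync(envFilePath, 'utf-8')
    .split('\n')
    .filter((line) => !line.startsWith('#'))
    .map((line) => line.split('='))
    .find(([name]) => name === key)?.[1];

// Example: getEnvValue('/runtipi/.env', 'APPS_REPO_ID')
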
@@ -52,31 +34,11 @@ else
  command="$1"
fi

-# Lists installed apps
-if [[ "$command" = "ls-installed" ]]; then
-  list_installed_apps
-
-  exit
-fi
-
if [ -z ${2+x} ]; then
  show_help
  exit 1
else

  app="$2"
-  root_folder_host="${3:-$ROOT_FOLDER}"
-  repo_id="${4:-$REPO_ID}"
-
-  if [[ -z "${repo_id}" ]]; then
-    echo "Error: Repo id not provided"
-    exit 1
-  fi
-
-  if [[ -z "${root_folder_host}" ]]; then
-    echo "Error: Root folder not provided"
-    exit 1
-  fi

  app_dir="${ROOT_FOLDER}/apps/${app}"

@@ -84,7 +46,7 @@ else
    # copy from repo
    echo "Copying app from repo"
    mkdir -p "${app_dir}"
-    cp -r "${ROOT_FOLDER}/repos/${repo_id}/apps/${app}"/* "${app_dir}"
+    cp -r "${ROOT_FOLDER}/repos/${REPO_ID}/apps/${app}"/* "${app_dir}"
  fi

  app_data_dir="${ROOT_FOLDER}/app-data/${app}"

@@ -99,21 +61,21 @@ fi
if [ -z ${3+x} ]; then
  args=""
else
-  args="${@:3}"
+  args="${*:3}"
fi

compose() {
  local app="${1}"
  shift

-  local architecture="$(uname -m)"
+  arch=$(uname -m)
+  local architecture="${arch}"

  if [[ "$architecture" == "aarch64" ]]; then
    architecture="arm64"
  fi

  # App data folder
  local env_file="${ROOT_FOLDER}/.env"
  local app_compose_file="${app_dir}/docker-compose.yml"

  # Pick arm architecture if running on arm and if the app has a docker-compose.arm.yml file

@@ -121,19 +83,14 @@ compose() {
    app_compose_file="${app_dir}/docker-compose.arm.yml"
  fi

-  local common_compose_file="${ROOT_FOLDER}/repos/${repo_id}/apps/docker-compose.common.yml"
+  local common_compose_file="${ROOT_FOLDER}/repos/${REPO_ID}/apps/docker-compose.common.yml"

  # Vars to use in compose file
-  export APP_DATA_DIR="${root_folder_host}/app-data/${app}"
-  export APP_DIR="${app_dir}"
-  export ROOT_FOLDER_HOST="${root_folder_host}"
-  export ROOT_FOLDER="${ROOT_FOLDER}"
-
-  # Docker Compose does not support multiple env files
-  # --env-file "${env_file}" \
+  export APP_DATA_DIR="${ROOT_FOLDER_HOST}/app-data/${app}"
+  export ROOT_FOLDER_HOST="${ROOT_FOLDER_HOST}"

  docker compose \
-    --env-file "${ROOT_FOLDER}/app-data/${app}/app.env" \
+    --env-file "${app_data_dir}/app.env" \
    --project-name "${app}" \
    --file "${app_compose_file}" \
    --file "${common_compose_file}" \

@@ -189,7 +146,7 @@ if [[ "$command" = "update" ]]; then
  fi

  # Copy app from repo
-  cp -r "${ROOT_FOLDER}/repos/${repo_id}/apps/${app}" "${app_dir}"
+  cp -r "${ROOT_FOLDER}/repos/${REPO_ID}/apps/${app}" "${app_dir}"

  compose "${app}" pull
  exit

@@ -211,11 +168,8 @@ fi

# Passes all arguments to Docker Compose
if [[ "$command" = "compose" ]]; then
-  compose "${app}" ${args}
+  compose "${app}" "${args}"
  exit
fi

# If we get here it means no valid command was supplied
# Show help and exit
show_help
exit 1

@@ -1,16 +1,4 @@
#!/usr/bin/env bash
-ROOT_FOLDER="$(readlink -f "$(dirname "${BASH_SOURCE[0]}")"/..)"
-
-echo
-echo "======================================"
-if [[ -f "${ROOT_FOLDER}/state/configured" ]]; then
-  echo "=========== RECONFIGURING ============"
-else
-  echo "============ CONFIGURING ============="
-fi
-echo "=============== TIPI ================="
-echo "======================================"
-echo

function install_docker() {
  local os="${1}"

@@ -1,13 +1,15 @@
#!/usr/bin/env bash
# Don't break if command fails

-# use greadlink instead of readlink on osx
-if [[ "$(uname)" == "Darwin" ]]; then
-  rdlk=greadlink
-else
-  rdlk=readlink
+cd /runtipi || echo ""
+
+# Ensure PWD ends with /runtipi
+if [[ $(basename "$(pwd)") != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
+  echo "Please make sure this script is executed from runtipi/"
+  exit 1
fi

-ROOT_FOLDER="$($rdlk -f $(dirname "${BASH_SOURCE[0]}")/..)"
+ROOT_FOLDER="${PWD}"

show_help() {
  cat <<EOF

@@ -27,7 +29,7 @@ EOF
# Get a static hash based on the repo url
function get_hash() {
  url="${1}"
-  echo $(echo -n "${url}" | sha256sum | awk '{print $1}')
+  echo -n "${url}" | sha256sum | awk '{print $1}'
}

if [ -z ${1+x} ]; then

@@ -65,7 +67,7 @@ if [[ "$command" = "update" ]]; then
  fi

  echo "Updating ${repo} in ${repo_hash}"
-  cd "${repo_dir}"
+  cd "${repo_dir}" || exit
  git pull origin master
  echo "Done"
  exit

@@ -73,6 +75,6 @@ fi

if [[ "$command" = "get_hash" ]]; then
  repo="$2"
-  echo $(get_hash "${repo}")
+  get_hash "${repo}"
  exit
fi

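Throughout these scripts the repo id is simply the hex-encoded sha256 of the repository URL (see get_hash above). A Node-side equivalent, should the API ever need to derive the same id, might look like this (a sketch; the API presumably receives the value via the APPS_REPO_ID environment variable rather than computing it):

import { createHash } from 'crypto';

// Mirror of get_hash in the script above: sha256 of the repo URL, hex encoded.
export const getRepoId = (repoUrl: string): string => createHash('sha256').update(repoUrl).digest('hex');

// Example: getRepoId('https://github.com/meienberger/runtipi-appstore')
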
@@ -5,18 +5,23 @@
set -e # Exit immediately if a command exits with a non-zero status.

-# use greadlink instead of readlink on osx
-if [[ "$(uname)" == "Darwin" ]]; then
-  readlink=greadlink
-else
-  readlink=readlink
-fi

NGINX_PORT=80
NGINX_PORT_SSL=443
PROXY_PORT=8080
DOMAIN=tipi.localhost

+# Check we are on linux
+if [[ "$(uname)" != "Linux" ]]; then
+  echo "Tipi only works on Linux"
+  exit 1
+fi
+
+# Ensure BASH_SOURCE is ./scripts/start.sh
+if [[ $(basename "$(pwd)") != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
+  echo "Please make sure this script is executed from runtipi/"
+  exit 1
+fi
+
NETWORK_INTERFACE="$(ip route | grep default | awk '{print $5}' | uniq)"
INTERNAL_IP="$(ip addr show "${NETWORK_INTERFACE}" | grep "inet " | awk '{print $2}' | cut -d/ -f1)"

@@ -88,33 +93,21 @@ while [ -n "$1" ]; do # while loop starts
  shift
done

-# Ensure BASH_SOURCE is ./scripts/start.sh
-if [[ $(basename $(pwd)) != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
-  echo "Please make sure this script is executed from runtipi/"
-  exit 1
-fi
-
-# Check we are on linux
-if [[ "$(uname)" != "Linux" ]]; then
-  echo "Tipi only works on Linux"
-  exit 1
-fi
-
# If port is not 80 and domain is not tipi.localhost, we exit
if [[ "${NGINX_PORT}" != "80" ]] && [[ "${DOMAIN}" != "tipi.localhost" ]]; then
  echo "Using a custom domain with a custom port is not supported"
  exit 1
fi

-ROOT_FOLDER="$($readlink -f $(dirname "${BASH_SOURCE[0]}")/..)"
+ROOT_FOLDER="${PWD}"
STATE_FOLDER="${ROOT_FOLDER}/state"
-SED_ROOT_FOLDER="$(echo $ROOT_FOLDER | sed 's/\//\\\//g')"
+SED_ROOT_FOLDER="$(echo "$ROOT_FOLDER" | sed 's/\//\\\//g')"

DNS_IP=9.9.9.9 # Default to Quad9 DNS
ARCHITECTURE="$(uname -m)"
TZ="$(timedatectl | grep "Time zone" | awk '{print $3}' | sed 's/\//\\\//g' || Europe\/Berlin)"
APPS_REPOSITORY="https://github.com/meienberger/runtipi-appstore"
-REPO_ID="$(${ROOT_FOLDER}/scripts/git.sh get_hash ${APPS_REPOSITORY})"
+REPO_ID="$("${ROOT_FOLDER}"/scripts/git.sh get_hash ${APPS_REPOSITORY})"
APPS_REPOSITORY_ESCAPED="$(echo ${APPS_REPOSITORY} | sed 's/\//\\\//g')"

if [[ "$ARCHITECTURE" == "aarch64" ]]; then

@@ -136,7 +129,7 @@ function get_json_field() {
  local json_file="$1"
  local field="$2"

-  echo $(jq -r ".${field}" "${json_file}")
+  jq -r ".${field}" "${json_file}"
}

# Deterministically derives 128 bits of cryptographically secure entropy

@@ -164,10 +157,13 @@ if [[ -f "/etc/resolv.conf" ]]; then
  TEMP=$(grep -E -o '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' /etc/resolv.conf | head -n 1)
fi

+# Clean logs folder
+rm -rf "${ROOT_FOLDER}/logs/*"
+
# Create seed file with cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1
if [[ ! -f "${STATE_FOLDER}/seed" ]]; then
  echo "Generating seed..."
-  cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1 >"${STATE_FOLDER}/seed"
+  tr </dev/urandom -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1 >"${STATE_FOLDER}/seed"
fi

export DOCKER_CLIENT_TIMEOUT=240

@@ -187,6 +183,31 @@ JWT_SECRET=$(derive_entropy "jwt")
POSTGRES_PASSWORD=$(derive_entropy "postgres")
TIPI_VERSION=$(get_json_field "${ROOT_FOLDER}/package.json" version)

+# Override vars with values from settings.json
+if [[ -f "${STATE_FOLDER}/settings.json" ]]; then
+
+  # If dnsIp is set in settings.json, use it
+  if [[ "$(get_json_field "${STATE_FOLDER}/settings.json" dnsIp)" != "null" ]]; then
+    DNS_IP=$(get_json_field "${STATE_FOLDER}/settings.json" dnsIp)
+  fi
+
+  # If domain is set in settings.json, use it
+  if [[ "$(get_json_field "${STATE_FOLDER}/settings.json" domain)" != "null" ]]; then
+    DOMAIN=$(get_json_field "${STATE_FOLDER}/settings.json" domain)
+  fi
+
+  # If appsRepoUrl is set in settings.json, use it
+  if [[ "$(get_json_field "${STATE_FOLDER}/settings.json" appsRepoUrl)" != "null" ]]; then
+    APPS_REPOSITORY_ESCAPED="$(echo ${APPS_REPOSITORY} | sed 's/\//\\\//g')"
+  fi
+
+  # If appsRepoId is set in settings.json, use it
+  if [[ "$(get_json_field "${STATE_FOLDER}/settings.json" appsRepoId)" != "null" ]]; then
+    REPO_ID=$(get_json_field "${STATE_FOLDER}/settings.json" appsRepoId)
+  fi
+
+fi
+
echo "Creating .env file with the following values:"
echo "  DOMAIN=${DOMAIN}"
echo "  INTERNAL_IP=${INTERNAL_IP}"

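The four overrides above (dnsIp, domain, appsRepoUrl, appsRepoId) are the settings.json keys the start script honours at this point. For reference, a state/settings.json exercising all of them could look like the following (shown as a TypeScript literal to match the other examples; all values are placeholders, not defaults):

// Hypothetical contents of state/settings.json.
const exampleSettings = {
  dnsIp: '1.1.1.1',
  domain: 'tipi.example.com',
  appsRepoUrl: 'https://github.com/my-org/my-appstore',
  appsRepoId: '<sha256 of appsRepoUrl>',
};
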
@@ -1,13 +1,6 @@
#!/usr/bin/env bash
set -euo pipefail

-# use greadlink instead of readlink on osx
-if [[ "$(uname)" == "Darwin" ]]; then
-  readlink=greadlink
-else
-  readlink=readlink
-fi
-
if [[ $UID != 0 ]]; then
  echo "Tipi must be stopped as root"
  echo "Please re-run this script as"

@@ -15,17 +8,20 @@ if [[ $UID != 0 ]]; then
  exit 1
fi

-ROOT_FOLDER="$($readlink -f $(dirname "${BASH_SOURCE[0]}")/..)"
-STATE_FOLDER="${ROOT_FOLDER}/state"
+# Ensure PWD ends with /runtipi
+if [[ $(basename "$(pwd)") != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
+  echo "Please run this script from the runtipi directory"
+  exit 1
+fi

-cd "$ROOT_FOLDER"
+ROOT_FOLDER="${PWD}"

export DOCKER_CLIENT_TIMEOUT=240
export COMPOSE_HTTP_TIMEOUT=240

# Stop all installed apps if there are any
apps_folder="${ROOT_FOLDER}/apps"
-if [ "$(find ${apps_folder} -maxdepth 1 -type d | wc -l)" -gt 1 ]; then
+if [ "$(find "${apps_folder}" -maxdepth 1 -type d | wc -l)" -gt 1 ]; then
  apps_names=($(ls -d ${apps_folder}/*/ | xargs -n 1 basename | sed 's/\///g'))

  for app_name in "${apps_names[@]}"; do

@@ -1,25 +1,32 @@
#!/usr/bin/env bash
set -e # Exit immediately if a command exits with a non-zero status.

-ROOT_FOLDER="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/..)"
+cd /runtipi || echo ""
+
+if [[ $(basename "$(pwd)") != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
+  echo "Please make sure this script is executed from runtipi/"
+  exit 1
+fi
+
+ROOT_FOLDER="$(pwd)"
STATE_FOLDER="${ROOT_FOLDER}/state"

# Available disk space
TOTAL_DISK_SPACE_BYTES=$(df -P -B 1 / | tail -n 1 | awk '{print $2}')
AVAILABLE_DISK_SPACE_BYTES=$(df -P -B 1 / | tail -n 1 | awk '{print $4}')
-USED_DISK_SPACE_BYTES=$(($TOTAL_DISK_SPACE_BYTES - $AVAILABLE_DISK_SPACE_BYTES))
+USED_DISK_SPACE_BYTES=$((TOTAL_DISK_SPACE_BYTES - AVAILABLE_DISK_SPACE_BYTES))

# CPU info
CPU_LOAD_PERCENTAGE=$(top -bn1 | grep "Cpu(s)" | sed "s/.*, *\([0-9.]*\)%* id.*/\1/" | awk '{print 100 - $1}')

# Memory info
-MEM_TOTAL_BYTES=$(($(cat /proc/meminfo | grep MemTotal | awk '{print $2}') * 1024))
-MEM_AVAILABLE_BYTES=$(($(cat /proc/meminfo | grep MemAvailable | awk '{print $2}') * 1024))
-MEM_USED_BYTES=$(($MEM_TOTAL_BYTES - $MEM_AVAILABLE_BYTES))
+MEM_TOTAL_BYTES=$(($(grep </proc/meminfo MemTotal | awk '{print $2}') * 1024))
+MEM_AVAILABLE_BYTES=$(($(grep </proc/meminfo MemAvailable | awk '{print $2}') * 1024))
+MEM_USED_BYTES=$((MEM_TOTAL_BYTES - MEM_AVAILABLE_BYTES))

# Create temporary json file
TEMP_JSON_FILE=$(mktemp)
echo '{ "cpu": { "load": '"${CPU_LOAD_PERCENTAGE}"' }, "memory": { "total": '"${MEM_TOTAL_BYTES}"' , "used": '"${MEM_USED_BYTES}"', "available": '"${MEM_AVAILABLE_BYTES}"' }, "disk": { "total": '"${TOTAL_DISK_SPACE_BYTES}"' , "used": '"${USED_DISK_SPACE_BYTES}"', "available": '"${AVAILABLE_DISK_SPACE_BYTES}"' } }' >"${TEMP_JSON_FILE}"

# Write to state file
-echo "$(cat "${TEMP_JSON_FILE}")" >"${STATE_FOLDER}/system-info.json"
+cat "${TEMP_JSON_FILE}" >"${STATE_FOLDER}/system-info.json"

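The JSON written above is presumably what the system helpers read back on the API side (the SystemInfo type declared in the system helpers diff earlier). Its shape, spelled out as a TypeScript type for reference (a sketch inferred from the echo line above, not copied from the source):

// Shape of state/system-info.json as produced by the script above.
export type SystemInfo = {
  cpu: { load: number };
  memory: { total: number; used: number; available: number };
  disk: { total: number; used: number; available: number };
};

// A consumer could read it as `readJsonFile('/state/system-info.json') as SystemInfo | null`,
// with the path resolved against getConfig().rootFolder by getAbsolutePath.
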
@@ -3,13 +3,13 @@

# Prompt to confirm
echo "This will reset your system to factory defaults. Are you sure you want to do this? (y/n)"
-read confirm
+read -r confirm
if [ "$confirm" != "y" ]; then
  echo "Aborting."
  exit 1
fi

-ROOT_FOLDER="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/..)"
+ROOT_FOLDER="$(readlink -f "$(dirname "${BASH_SOURCE[0]}")"/..)"

# Stop Tipi
"${ROOT_FOLDER}/scripts/stop.sh"

@@ -25,5 +25,5 @@ rm -rf "${ROOT_FOLDER}/app-data"
rm -rf "${ROOT_FOLDER}/data/postgres"
mkdir -p "${ROOT_FOLDER}/app-data"

-cd "$ROOT_FOLDER"
+cd "$ROOT_FOLDER" || echo ""
"${ROOT_FOLDER}/scripts/start.sh"

33  scripts/utils.sh (new executable file)
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+cd /runtipi || echo ""
+
+# Ensure PWD ends with /runtipi
+if [[ $(basename "$(pwd)") != "runtipi" ]] || [[ ! -f "${BASH_SOURCE[0]}" ]]; then
+  echo "Please make sure this script is executed from runtipi/"
+  exit 1
+fi
+
+if [ -z ${1+x} ]; then
+  command=""
+else
+  command="$1"
+fi
+
+# Restart Tipi
+if [[ "$command" = "restart" ]]; then
+  echo "Restarting Tipi..."
+
+  scripts/stop.sh
+  scripts/start.sh
+
+  exit
+fi
+
+# Update Tipi
+if [[ "$command" = "update" ]]; then
+  scripts/stop.sh
+  git pull origin master
+  scripts/start.sh
+  exit
+fi