Merge develop

commit 153f537362
Author: Nicolas Meienberger
Date: 2022-05-17 23:21:25 +02:00

58 changed files with 1162 additions and 1216 deletions

.github/workflows/build-images.yml (vendored, new file, +53)

@ -0,0 +1,53 @@
name: Docker build
on:
push:
branches:
- 'master'
jobs:
docker:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v3
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Get tag from VERSION file
id: meta
run: |
VERSION=$(cat VERSION)
TAG=${VERSION}
echo "::set-output name=tag::${TAG}"
-
name: Build and push dashboard
uses: docker/build-push-action@v2
with:
context: ./packages/dashboard
platforms: linux/amd64,linux/arm64
push: true
tags: meienberger/tipi-dashboard:latest,meienberger/tipi-dashboard:${{ steps.meta.outputs.TAG }}
cache-from: type=registry,ref=meienberger/tipi-dashboard:latest
cache-to: type=inline
-
name: Build and push api
uses: docker/build-push-action@v2
with:
context: ./packages/system-api
platforms: linux/amd64,linux/arm64
push: true
tags: meienberger/tipi-api:latest,meienberger/tipi-api:${{ steps.meta.outputs.TAG }}
cache-from: type=registry,ref=meienberger/tipi-api:latest
cache-to: type=inline


@ -5,6 +5,7 @@ on:
env:
ROOT_FOLDER: /test
JWT_SECRET: "secret"
ROOT_FOLDER_HOST: /tipi
jobs:
ci:
@ -47,6 +48,7 @@ jobs:
- name: Run tests
run: pnpm -r test
# Test installation script
test-install:
runs-on: ubuntu-latest
steps:
@ -55,16 +57,5 @@ jobs:
- name: Create user and group
run: useradd -u 1000 test
- uses: actions/setup-python@v3
with:
python-version: '3.9'
cache: 'pip'
# This is normally done in the start script but
# we need to do it here to cache the dependency
- name: Install ansible
run: pip install -r requirements.txt
- name: Run install script
run: sudo ./scripts/start.sh

.github/workflows/verify-release.yml (vendored, new file, +34)

@ -0,0 +1,34 @@
name: Verify release
on:
pull_request:
branches:
- master
jobs:
verify:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 2
- uses: technote-space/get-diff-action@v6
with:
FILES: |
VERSION
- name: Ensure env.MATCHED_FILES has VERSION in it
id: check-version
run: |
if [[ -z "${{ env.MATCHED_FILES }}" ]]; then
echo "::error::VERSION not modified"
exit 1
fi
if [[ ! "${{ env.MATCHED_FILES }}" =~ VERSION ]]; then
echo "::error::VERSION not modified"
exit 1
fi

.gitignore (vendored, +1)

@ -1,6 +1,7 @@
.pnpm-debug.log
.env
.env*
github.secrets
node_modules/
nginx/*
letsencrypt/*

VERSION (new file, +1)

@ -0,0 +1 @@
0.1.3


@ -1,7 +1,4 @@
packages:
- jq
- ufw
- coreutils
- git
- docker
- iptables


@ -1,6 +0,0 @@
---
- hosts: tipi
become: yes
tasks:
- import_tasks: ./tasks/common/packages.yml


@ -21,11 +21,11 @@
when: lsb_release.stdout == 'Debian'
- name: Add deb repo for docker (Ubuntu)
shell: echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
shell: echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
when: lsb_release.stdout == 'Ubuntu'
- name: Add deb repo for docker (Debian)
shell: echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
shell: echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
when: lsb_release.stdout == 'Debian'
- name: Update packages


@ -3,12 +3,6 @@
update_cache: yes
upgrade: yes
- name: Install node 16
shell: curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
- name: Install node
shell: apt-get install -y nodejs
- name: Install essential packages
package:
name: "{{ packages }}"
@ -30,36 +24,10 @@
line: "{{ username }} ALL=(ALL) NOPASSWD: ALL"
validate: "/usr/sbin/visudo -cf %s"
- name: Allow SSH in UFW
community.general.ufw:
rule: allow
port: 22
proto: tcp
- name: Allow port 80 in UFW
community.general.ufw:
rule: allow
port: 80
proto: tcp
- name: Allow port 443 in UFW
community.general.ufw:
rule: allow
port: 443
proto: tcp
- name: Allow ports for apps
community.general.ufw:
rule: allow
port: 3000:3001
proto: tcp
- name: Enable ufw daemon
service:
name: ufw
state: started
enabled: yes
- name: Enable UFW
community.general.ufw:
state: enabled
- name: Create cron every minute running system-info.sh
cron:
name: "system-info"
user: "{{ username }}"
minute: "*/1"
job: "{{ playbook_dir }}/../scripts/system-info.sh"
ignore_errors: yes


@ -1,40 +0,0 @@
- name: Install "pm2" package globally.
community.general.npm:
name: pm2
global: yes
- name: Install "pnpm" package globally.
community.general.npm:
name: pnpm
global: yes
- name: Run pm2 first time
shell: pm2 list
- name: Enable pm2 as a service
shell: sudo env PATH=$PATH:/usr/local/bin pm2 startup -u {{ username }}
- name: Install dependencies
shell: cd {{ playbook_dir }} && pnpm install
- name: Clean packages
shell: cd {{ playbook_dir }} && pnpm -r clean
- name: Build packages
become_user: "{{ username }}"
shell: cd {{ playbook_dir }} && pnpm -r build-prod
- name: Check if app is already running
become_user: "{{ username }}"
shell: pm2 status system-api
register: pm2_result
- name: Start app
become_user: "{{ username }}"
shell: cd {{ playbook_dir }}/../packages/system-api && pm2 start npm --name "system-api" -- start
when: pm2_result.stdout.find("online") == -1
- name: Reload app
become_user: "{{ username }}"
shell: pm2 reload system-api
when: pm2_result.stdout.find("online") != -1


@ -1,9 +1,18 @@
- name: Check if pm2 is installed
become_user: "{{ username }}"
stat:
path: /usr/local/bin/pm2
register: pm2_status
- name: Check if app is already running
become_user: "{{ username }}"
shell: pm2 list
register: pm2_result
when: pm2_status.stat.exists
- name: Stop app
become_user: "{{ username }}"
shell: pm2 stop "system-api"
when: pm2_result.stdout.find("system-api") != -1
when:
- pm2_status.stat.exists
- pm2_result.stdout.find("system-api") != -1


@ -3,7 +3,7 @@
"available": true,
"port": 8096,
"id": "filebrowser",
"description": "Reliable and Performant File Management Desktop Sync and File Sharing",
"description": "Reliable and Performant File Management Desktop Sync and File Sharing\n Default credentials: admin / admin",
"short_desc": "Access your homeserver files from your browser",
"author": "",
"website": "https://filebrowser.org/",


@ -8,8 +8,8 @@ services:
- PUID=1000
- PGID=1000
volumes:
- ${ROOT_FOLDER}:/srv
- ${APP_DATA_DIR}/data/filebrowser.db:/database/filebrowser.db
- ${APP_DATA_DIR}/data/settings.json:/config/settings.json
- ${APP_DATA_DIR}/../..:/srv
- ${APP_DATA_DIR}/data/db:/database
- ${APP_DATA_DIR}/data/config:/config
networks:
- tipi_main_network


@ -33,6 +33,6 @@ services:
ports:
- ${APP_PORT}:80
volumes:
- ${ROOT_FOLDER}/app-data/medias:/user-files
- ${ROOT_FOLDER_HOST}/app-data/medias:/user-files
networks:
- tipi_main_network


@ -12,7 +12,7 @@ services:
- ${DNS_IP}
volumes:
- ${APP_DATA_DIR}/data:/config
- ${ROOT_FOLDER}/media/torrents:/downloads
- ${ROOT_FOLDER_HOST}/media/torrents:/downloads
ports:
- ${APP_PORT}:9117
restart: unless-stopped


@ -6,7 +6,7 @@ services:
container_name: jellyfin
volumes:
- ${APP_DATA_DIR}/data/config:/config
- ${ROOT_FOLDER}/media/data:/data/media
- ${ROOT_FOLDER_HOST}/media/data:/data/media
environment:
- PUID=1000
- PGID=1000


@ -3,7 +3,7 @@
"available": true,
"port": 8099,
"id": "joplin",
"description": "",
"description": "Default credentials: admin@localhost / admin",
"short_desc": "Note taking and to-do application with synchronisation",
"author": "https://github.com/laurent22",
"source": "https://github.com/laurent22/joplin",


@ -52,7 +52,7 @@ services:
- POSTGRES_DB=nextcloud
- NEXTCLOUD_ADMIN_USER=${NEXTCLOUD_ADMIN_USER}
- NEXTCLOUD_ADMIN_PASSWORD=${NEXTCLOUD_ADMIN_PASSWORD}
- NEXTCLOUD_TRUSTED_DOMAINS=${DEVICE_IP}:${APP_PORT}
- NEXTCLOUD_TRUSTED_DOMAINS=${INTERNAL_IP}:${APP_PORT}
depends_on:
- db-nextcloud
- redis-nextcloud


@ -11,8 +11,8 @@ services:
- ${DNS_IP}
volumes:
- ${APP_DATA_DIR}/data:/config
- ${ROOT_FOLDER}/media/data/movies:/movies #optional
- ${ROOT_FOLDER}/media/torrents:/downloads #optional
- ${ROOT_FOLDER_HOST}/media/data/movies:/movies #optional
- ${ROOT_FOLDER_HOST}/media/torrents:/downloads #optional
ports:
- ${APP_PORT}:7878
restart: unless-stopped


@ -11,8 +11,8 @@ services:
- ${DNS_IP}
volumes:
- ${APP_DATA_DIR}/data:/config
- ${ROOT_FOLDER}/media/data/tv:/tv #optional
- ${ROOT_FOLDER}/media/torrents:/downloads #optional
- ${ROOT_FOLDER_HOST}/media/data/tv:/tv #optional
- ${ROOT_FOLDER_HOST}/media/torrents:/downloads #optional
ports:
- ${APP_PORT}:8989
restart: unless-stopped


@ -14,7 +14,7 @@ services:
# - HOST_WHITELIST=dnsname list #optional
volumes:
- ${APP_DATA_DIR}/data/config:/config
- ${ROOT_FOLDER}/media/torrents:/downloads
- ${ROOT_FOLDER_HOST}/media/torrents:/downloads
ports:
- ${APP_PORT}:9091
- 51413:51413

docker-compose.dev.yml (new file, +55)

@ -0,0 +1,55 @@
version: "3.7"
services:
api:
build:
context: ./packages/system-api
dockerfile: Dockerfile.dev
container_name: api
ports:
- 3001:3001
volumes:
## Docker sock
- /var/run/docker.sock:/var/run/docker.sock:ro
- ${PWD}:/tipi
- ${PWD}/packages/system-api:/app
- /app/node_modules
environment:
- INTERNAL_IP=${INTERNAL_IP}
- TIPI_VERSION=${TIPI_VERSION}
- JWT_SECRET=${JWT_SECRET}
- ROOT_FOLDER_HOST=${ROOT_FOLDER_HOST}
networks:
- tipi_main_network
dashboard:
build:
context: ./packages/dashboard
dockerfile: Dockerfile.dev
container_name: dashboard
ports:
- 3000:3000
networks:
- tipi_main_network
environment:
- INTERNAL_IP=${INTERNAL_IP}
volumes:
- ${PWD}/packages/dashboard:/app
- /app/node_modules
labels:
traefik.enable: true
traefik.http.routers.dashboard.rule: PathPrefix("/") # Host(`tipi.local`) &&
traefik.http.routers.dashboard.entrypoints: webinsecure
traefik.http.routers.dashboard.service: dashboard
traefik.http.services.dashboard.loadbalancer.server.port: 3000
networks:
tipi_main_network:
driver: bridge
driver_opts:
com.docker.network.bridge.enable_ip_masquerade: "true"
com.docker.network.bridge.enable_icc: "true"
ipam:
driver: default
config:
- subnet: 10.21.21.0/24


@ -15,19 +15,33 @@ services:
networks:
- tipi_main_network
dashboard:
build:
context: ./packages/dashboard
dockerfile: Dockerfile
args:
INTERNAL_IP_ARG: ${INTERNAL_IP}
container_name: dashboard
api:
image: meienberger/tipi-api:${TIPI_VERSION}
container_name: api
ports:
- 3001:3001
volumes:
- ${PWD}/state:/app/state
## Docker sock
- /var/run/docker.sock:/var/run/docker.sock:ro
- ${PWD}:/tipi
environment:
- INTERNAL_IP=${INTERNAL_IP}
- TIPI_VERSION=${TIPI_VERSION}
- JWT_SECRET=${JWT_SECRET}
- ROOT_FOLDER_HOST=${ROOT_FOLDER_HOST}
networks:
- tipi_main_network
dashboard:
image: meienberger/tipi-dashboard:${TIPI_VERSION}
container_name: dashboard
ports:
- 3000:3000
networks:
- tipi_main_network
environment:
- INTERNAL_IP=${INTERNAL_IP}
labels:
traefik.enable: true
traefik.http.routers.dashboard.rule: PathPrefix("/") # Host(`tipi.local`) &&


@ -1,31 +1,15 @@
{
"name": "runtipi",
"version": "0.1.2",
"version": "0.1.3",
"description": "A homeserver for everyone",
"scripts": {
"prepare": "husky install",
"act": "act --container-architecture linux/amd64 -j test-install"
},
"dependencies": {
"eslint": "^8.15.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-airbnb-typescript": "^17.0.0",
"eslint-config-next": "^12.1.4",
"eslint-config-prettier": "^8.5.0",
"eslint-import-resolver-node": "^0.3.4",
"eslint-import-resolver-typescript": "^2.4.0",
"eslint-module-utils": "^2.7.3",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jsx-a11y": "^6.5.1",
"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-react": "^7.29.1",
"eslint-plugin-react-hooks": "^4.3.0",
"eslint-scope": "^7.1.1",
"eslint-utils": "^3.0.0",
"eslint-visitor-keys": "^3.3.0",
"prettier": "^2.6.2",
"prettier-linter-helpers": "^1.0.0"
"act:test-install": "act --container-architecture linux/amd64 -j test-install",
"act:docker": "act --container-architecture linux/amd64 --secret-file github.secrets -j docker",
"start:dev": "docker-compose -f docker-compose.dev.yml --env-file .env.dev up --build",
"start:prod": "docker-compose --env-file .env up --build"
},
"dependencies": {},
"devDependencies": {
"husky": "^8.0.1"
},


@ -1,2 +1,3 @@
*.config.js
.eslintrc.js
.eslintrc.js
next.config.js


@ -8,9 +8,6 @@ RUN yarn
COPY ./ ./
ARG INTERNAL_IP_ARG
ENV INTERNAL_IP $INTERNAL_IP_ARG
RUN yarn build
CMD ["yarn", "start"]


@ -3,7 +3,6 @@ FROM node:latest
WORKDIR /app
COPY ./package.json ./
COPY ./yarn.lock ./
RUN yarn


@ -4,7 +4,7 @@ const { NODE_ENV, INTERNAL_IP } = process.env;
const nextConfig = {
reactStrictMode: true,
env: {
INTERNAL_IP: NODE_ENV === 'development' ? 'localhost' : INTERNAL_IP,
INTERNAL_IP: INTERNAL_IP,
},
};


@ -1,6 +1,6 @@
{
"name": "dashboard",
"version": "0.1.2",
"version": "0.1.3",
"private": true,
"scripts": {
"dev": "next dev",
@ -9,7 +9,7 @@
"lint": "next lint"
},
"dependencies": {
"@chakra-ui/react": "^1.8.7",
"@chakra-ui/react": "^2.0.2",
"@emotion/react": "^11",
"@emotion/styled": "^11",
"@fontsource/open-sans": "^4.5.8",


@ -1,6 +1,5 @@
import axios, { Method } from 'axios';
export const BASE_URL = `http://${process.env.INTERNAL_IP}:3001`;
import { useSytemStore } from '../state/systemStore';
interface IFetchParams {
endpoint: string;
@ -12,6 +11,9 @@ interface IFetchParams {
const api = async <T = unknown>(fetchParams: IFetchParams): Promise<T> => {
const { endpoint, method = 'GET', params, data } = fetchParams;
const { getState } = useSytemStore;
const BASE_URL = `http://${getState().internalIp}:3001`;
const response = await axios.request<T & { error?: string }>({
method,
params,


@ -1,8 +1,11 @@
import { BareFetcher } from 'swr';
import axios from 'axios';
import { BASE_URL } from './api';
import { useSytemStore } from '../state/systemStore';
const fetcher: BareFetcher<any> = (url: string) => {
const { getState } = useSytemStore;
const BASE_URL = `http://${getState().internalIp}:3001`;
return axios.get(url, { baseURL: BASE_URL, withCredentials: true }).then((res) => res.data);
};


@ -3,6 +3,7 @@ import React from 'react';
import { FiExternalLink } from 'react-icons/fi';
import { AppConfig } from '../../../core/types';
import { useAppsStore } from '../../../state/appsStore';
import { useSytemStore } from '../../../state/systemStore';
import AppActions from '../components/AppActions';
import InstallModal from '../components/InstallModal';
import StopModal from '../components/StopModal';
@ -21,6 +22,7 @@ const AppDetails: React.FC<IProps> = ({ app }) => {
const updateDisclosure = useDisclosure();
const { install, update, uninstall, stop, start, fetchApp } = useAppsStore();
const { internalIp } = useSytemStore();
const handleError = (error: unknown) => {
if (error instanceof Error) {
@ -86,7 +88,7 @@ const AppDetails: React.FC<IProps> = ({ app }) => {
};
const handleOpen = () => {
window.open(`http://${process.env.INTERNAL_IP}:${app.port}`, '_blank');
window.open(`http://${internalIp}:${app.port}`, '_blank');
};
return (


@ -1,6 +1,9 @@
import axios from 'axios';
import React, { useEffect, useState } from 'react';
import useSWR, { BareFetcher } from 'swr';
import LoadingScreen from '../../../components/LoadingScreen';
import { useAuthStore } from '../../../state/authStore';
import { useSytemStore } from '../../../state/systemStore';
import Login from './Login';
import Onboarding from './Onboarding';
@ -8,9 +11,16 @@ interface IProps {
children: React.ReactNode;
}
const fetcher: BareFetcher<any> = (url: string) => {
return axios.get(url).then((res) => res.data);
};
const AuthWrapper: React.FC<IProps> = ({ children }) => {
const [initialLoad, setInitialLoad] = useState(true);
const { configured, user, me, fetchConfigured } = useAuthStore();
const { internalIp, setInternalIp } = useSytemStore();
const { data } = useSWR('/api/ip', fetcher);
useEffect(() => {
const fetchUser = async () => {
@ -19,8 +29,14 @@ const AuthWrapper: React.FC<IProps> = ({ children }) => {
setInitialLoad(false);
};
if (!user) fetchUser();
}, [fetchConfigured, me, user]);
if (!user && internalIp) fetchUser();
}, [fetchConfigured, internalIp, me, user]);
useEffect(() => {
if (data?.ip && !internalIp) {
setInternalIp(data.ip);
}
}, [data?.ip, internalIp, setInternalIp]);
if (initialLoad && !user) {
return <LoadingScreen />;


@ -25,12 +25,12 @@ const Dashboard: React.FC = () => {
// Convert bytes to GB
const diskFree = Math.round(disk.available / 1024 / 1024 / 1024);
const diskSize = Math.round(disk.size / 1024 / 1024 / 1024);
const diskSize = Math.round(disk.total / 1024 / 1024 / 1024);
const diskUsed = diskSize - diskFree;
const percentUsed = Math.round((diskUsed / diskSize) * 100);
const memoryTotal = Math.round(memory?.total / 1024 / 1024 / 1024);
const memoryFree = Math.round(memory?.free / 1024 / 1024 / 1024);
const memoryFree = Math.round(memory?.available / 1024 / 1024 / 1024);
const percentUsedMemory = Math.round(((memoryTotal - memoryFree) / memoryTotal) * 100);
return (


@ -0,0 +1,5 @@
export default function handler(_: any, res: any) {
const { INTERNAL_IP } = process.env;
res.status(200).json({ ip: INTERNAL_IP });
}


@ -3,26 +3,25 @@ import { Text } from '@chakra-ui/react';
import useSWR from 'swr';
import Layout from '../components/Layout';
import fetcher from '../core/fetcher';
import Package from '../../package.json';
const Settings: NextPage = () => {
const { data: latestVersion } = useSWR<string>('/system/version/latest', fetcher);
const { data } = useSWR<{ current: string; latest: string }>('/system/version', fetcher);
const isLatest = latestVersion === `v${Package.version}`;
const isLatest = data?.latest === data?.current;
const renderUpdate = () => {
if (isLatest) {
return (
<Text fontSize="md" color="green.500">
Your Tipi install is up to date. Version {Package.version}
Your Tipi install is up to date. Version {data?.current}
</Text>
);
}
return (
<Text fontSize="md">
You are not using the latest version of Tipi. There is a new version ({latestVersion}) available. Visit{' '}
<a className="text-blue-600" target="_blank" rel="noreferrer" href={`https://github.com/meienberger/runtipi/releases/${latestVersion}`}>
You are not using the latest version of Tipi. There is a new version ({data?.latest}) available. Visit{' '}
<a className="text-blue-600" target="_blank" rel="noreferrer" href={`https://github.com/meienberger/runtipi/releases/v${data?.latest}`}>
Github
</a>{' '}
for update instructions.
@ -31,7 +30,7 @@ const Settings: NextPage = () => {
};
return (
<Layout loading={!latestVersion}>
<Layout loading={!data}>
<Text fontSize="3xl" className="font-bold">
Settings
</Text>


@ -3,17 +3,21 @@ import api from '../core/api';
type Store = {
cpuLoad: number;
disk: { size: number; used: number; available: number };
memory: { total: number; used: number; free: number };
internalIp: string;
disk: { total: number; used: number; available: number };
memory: { total: number; used: number; available: number };
fetchDiskSpace: () => void;
fetchCpuLoad: () => void;
fetchMemoryLoad: () => void;
setInternalIp: (internalIp: string) => void;
};
export const useSytemStore = create<Store>((set) => ({
cpuLoad: 0,
memory: { total: 0, used: 0, free: 0 },
disk: { size: 0, used: 0, available: 0 },
internalIp: '',
setInternalIp: (internalIp: string) => set((state) => ({ ...state, internalIp })),
memory: { total: 0, used: 0, available: 0 },
disk: { total: 0, used: 0, available: 0 },
fetchDiskSpace: async () => {
const response = await api.fetch<any>({
endpoint: '/system/disk',


@ -0,0 +1,2 @@
node_modules/
dist/


@ -0,0 +1,38 @@
FROM ubuntu:20.04
ARG DEBIAN_FRONTEND=noninteractive
WORKDIR /app
# Install docker
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
gnupg \
lsb-release
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
RUN echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
RUN apt-get update
RUN apt-get install -y docker-ce docker-ce-cli containerd.io
# Install node
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash -
RUN apt-get install -y nodejs
# Install docker-compose
RUN curl -L "https://github.com/docker/compose/releases/download/v2.5.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
RUN chmod +x /usr/local/bin/docker-compose
COPY ./package.json ./
RUN npm install
COPY ./ ./
RUN npm run build
CMD ["npm", "run", "start"]


@ -0,0 +1,36 @@
FROM ubuntu:20.04
ARG DEBIAN_FRONTEND=noninteractive
WORKDIR /app
# Install docker
RUN apt-get update && apt-get install -y \
ca-certificates \
curl \
gnupg \
lsb-release
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
RUN echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
RUN apt-get update
RUN apt-get install -y docker-ce docker-ce-cli containerd.io
# Install node
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash -
RUN apt-get install -y nodejs
# Install docker-compose
RUN curl -L "https://github.com/docker/compose/releases/download/v2.5.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
RUN chmod +x /usr/local/bin/docker-compose
COPY ./package.json ./
RUN npm install
COPY ./ ./
CMD ["npm", "run", "dev"]


@ -1,6 +1,6 @@
{
"name": "system-api",
"version": "0.1.2",
"version": "0.1.3",
"description": "",
"exports": "./dist/server.js",
"type": "module",
@ -12,17 +12,16 @@
"lint": "eslint . --ext .ts",
"test": "jest",
"test:watch": "jest --watch",
"build-prod": "esbuild --bundle src/server.ts --outdir=dist --allow-overwrite --sourcemap --platform=node --minify --analyze=verbose --external:./node_modules/* --format=esm",
"build": "esbuild --bundle src/server.ts --outdir=dist --allow-overwrite --sourcemap --platform=node --minify --analyze=verbose --external:./node_modules/* --format=esm",
"build:watch": "esbuild --bundle src/server.ts --outdir=dist --allow-overwrite --sourcemap --platform=node --external:./node_modules/* --format=esm --watch",
"start:dev": "NODE_ENV=development nodemon --trace-deprecation --trace-warnings --watch dist dist/server.js",
"dev": "concurrently \"yarn build:watch\" \"yarn start:dev\"",
"dev": "concurrently \"npm run build:watch\" \"npm run start:dev\"",
"start": "NODE_ENV=production node dist/server.js"
},
"author": "",
"license": "ISC",
"dependencies": {
"argon2": "^0.28.5",
"bcrypt": "^5.0.1",
"compression": "^1.7.4",
"cookie-parser": "^1.4.6",
"cors": "^2.8.5",
@ -44,7 +43,6 @@
"tcp-port-used": "^1.0.2"
},
"devDependencies": {
"@types/bcrypt": "^5.0.0",
"@types/compression": "^1.7.2",
"@types/cookie-parser": "^1.4.3",
"@types/cors": "^2.8.12",


@ -5,25 +5,21 @@ interface IConfig {
ROOT_FOLDER: string;
JWT_SECRET: string;
CLIENT_URLS: string[];
VERSION: string;
ROOT_FOLDER_HOST: string;
}
dotenv.config();
const { NODE_ENV = 'development', ROOT_FOLDER = '', JWT_SECRET = '', INTERNAL_IP = '' } = process.env;
const missing = [];
if (!ROOT_FOLDER) missing.push('ROOT_FOLDER');
if (missing.length > 0) {
throw new Error(`Missing environment variables: ${missing.join(', ')}`);
}
const { NODE_ENV = 'development', JWT_SECRET = '', INTERNAL_IP = '', TIPI_VERSION = '', ROOT_FOLDER_HOST = '' } = process.env;
const config: IConfig = {
NODE_ENV,
ROOT_FOLDER,
ROOT_FOLDER: '/tipi',
JWT_SECRET,
CLIENT_URLS: ['http://locahost:3000', `http://${INTERNAL_IP}`, `http://${INTERNAL_IP}:3000`],
CLIENT_URLS: ['http://localhost:3000', `http://${INTERNAL_IP}`, `http://${INTERNAL_IP}:3000`],
VERSION: TIPI_VERSION,
ROOT_FOLDER_HOST,
};
export default config;


@ -84,7 +84,7 @@ describe('Install app', () => {
await AppsService.installApp('test-app', { test: 'test' });
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['install', 'test-app'], {}, expect.any(Function)]);
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['install', 'test-app', '/tipi'], {}, expect.any(Function)]);
spy.mockRestore();
});
@ -96,8 +96,8 @@ describe('Install app', () => {
await AppsService.installApp('test-app', { test: 'test' });
expect(spy.mock.calls.length).toBe(2);
expect(spy.mock.calls[0]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['install', 'test-app'], {}, expect.any(Function)]);
expect(spy.mock.calls[1]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', 'test-app'], {}, expect.any(Function)]);
expect(spy.mock.calls[0]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['install', 'test-app', '/tipi'], {}, expect.any(Function)]);
expect(spy.mock.calls[1]).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', 'test-app', '/tipi'], {}, expect.any(Function)]);
spy.mockRestore();
});
@ -126,7 +126,7 @@ describe('Uninstall app', () => {
await AppsService.uninstallApp('test-app');
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['uninstall', 'test-app'], {}, expect.any(Function)]);
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['uninstall', 'test-app', '/tipi'], {}, expect.any(Function)]);
spy.mockRestore();
});
@ -147,7 +147,7 @@ describe('Start app', () => {
await AppsService.startApp('test-app');
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', 'test-app'], {}, expect.any(Function)]);
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['start', 'test-app', '/tipi'], {}, expect.any(Function)]);
spy.mockRestore();
});
@ -193,7 +193,7 @@ describe('Stop app', () => {
await AppsService.stopApp('test-app');
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['stop', 'test-app'], {}, expect.any(Function)]);
expect(spy.mock.lastCall).toEqual([`${config.ROOT_FOLDER}/scripts/app.sh`, ['stop', 'test-app', '/tipi'], {}, expect.any(Function)]);
});
it('Should throw if app is not installed', async () => {


@ -3,6 +3,7 @@ import p from 'p-iteration';
import { AppConfig } from '../../config/types';
import { fileExists, readdirSync, readFile, readJsonFile, runScript, writeFile } from '../fs/fs.helpers';
import InternalIp from 'internal-ip';
import config from '../../config';
type AppsState = { installed: string };
@ -76,7 +77,7 @@ export const checkAppExists = (appName: string) => {
export const runAppScript = (params: string[]): Promise<void> => {
return new Promise((resolve, reject) => {
runScript('/scripts/app.sh', params, (err: string) => {
runScript('/scripts/app.sh', [...params, config.ROOT_FOLDER_HOST], (err: string) => {
if (err) {
reject(err);
}


@ -1,5 +1,4 @@
import fs from 'fs';
// import bcrypt from 'bcrypt';
import jsonwebtoken from 'jsonwebtoken';
import * as argon2 from 'argon2';
import config from '../../../config';


@ -30,7 +30,7 @@ const register = async (email: string, password: string, name: string) => {
throw new Error('User already exists');
}
const hash = await argon2.hash(password); // bcrypt.hash(password, 10);
const hash = await argon2.hash(password);
const newuser: IUser = { email, name, password: hash };
const token = await AuthHelpers.getJwtToken(newuser, password);


@ -1,34 +1,42 @@
import { Request, Response } from 'express';
import si from 'systeminformation';
import fetch from 'node-fetch';
import config from '../../config';
import TipiCache from '../../config/cache';
import { readJsonFile } from '../fs/fs.helpers';
type CpuData = {
load: number;
};
type DiskData = {
size: number;
total: number;
used: number;
available: number;
};
type MemoryData = {
total: number;
free: number;
available: number;
used: number;
};
type SystemInfo = {
cpu: CpuData;
disk: DiskData;
memory: MemoryData;
};
/**
*
* @param req
* @param res
*/
const getCpuInfo = async (req: Request, res: Response<CpuData>) => {
// const cpuInfo = await cpu.getCpuInfo();
const cpuLoad = await si.currentLoad();
const systemInfo: SystemInfo = readJsonFile('/state/system-info.json');
res.status(200).send({ load: cpuLoad.currentLoad });
const cpu = systemInfo.cpu;
res.status(200).send({ load: cpu.load });
};
/**
@ -37,19 +45,9 @@ const getCpuInfo = async (req: Request, res: Response<CpuData>) => {
* @param res
*/
const getDiskInfo = async (req: Request, res: Response<DiskData>) => {
const disk = await si.fsSize();
const systemInfo: SystemInfo = readJsonFile('/state/system-info.json');
const rootDisk = disk.find((item) => item.mount === '/');
if (!rootDisk) {
throw new Error('Could not find root disk');
}
const result: DiskData = {
size: rootDisk.size,
used: rootDisk.used,
available: rootDisk.available,
};
const result: DiskData = systemInfo.disk;
res.status(200).send(result);
};
@ -60,32 +58,24 @@ const getDiskInfo = async (req: Request, res: Response<DiskData>) => {
* @param res
*/
const getMemoryInfo = async (req: Request, res: Response<MemoryData>) => {
const memory = await si.mem();
const systemInfo: SystemInfo = readJsonFile('/state/system-info.json');
const result: MemoryData = {
total: memory.total,
free: memory.free,
used: memory.used,
};
const result: MemoryData = systemInfo.memory;
res.status(200).json(result);
};
const getLatestVersion = async (req: Request, res: Response<string>) => {
const getVersion = async (_: Request, res: Response<{ current: string; latest: string }>) => {
let version = TipiCache.get<string>('latestVersion');
console.log('CACHED', version);
if (!version) {
const response = await fetch('https://api.github.com/repos/meienberger/runtipi/releases/latest');
const json = (await response.json()) as { name: string };
TipiCache.set('latestVersion', json.name);
version = json.name;
version = json.name.replace('v', '');
}
console.log(version);
res.status(200).send(version);
res.status(200).send({ current: config.VERSION, latest: version });
};
export default { getCpuInfo, getDiskInfo, getMemoryInfo, getLatestVersion };
export default { getCpuInfo, getDiskInfo, getMemoryInfo, getVersion };


@ -6,6 +6,6 @@ const router = Router();
router.route('/cpu').get(SystemController.getCpuInfo);
router.route('/disk').get(SystemController.getDiskInfo);
router.route('/memory').get(SystemController.getMemoryInfo);
router.route('/version/latest').get(SystemController.getLatestVersion);
router.route('/version').get(SystemController.getVersion);
export default router;


@ -23,7 +23,22 @@ if (isProd) {
app.use(helmet());
}
app.use(cors({ credentials: true, origin: config.CLIENT_URLS }));
app.use(
cors({
credentials: true,
origin: function (origin, callback) {
// allow requests with no origin
if (!origin) return callback(null, true);
if (config.CLIENT_URLS.indexOf(origin) === -1) {
var message = "The CORS policy for this origin doesn't allow access from the particular origin.";
return callback(new Error(message), false);
}
return callback(null, true);
},
}),
);
// Get user from token
app.use((req, res, next) => {

pnpm-lock.yaml (generated, 1614 lines changed): diff suppressed because it is too large


@ -60,6 +60,7 @@ if [ -z ${2+x} ]; then
exit 1
else
app="$2"
root_folder_host="$3"
app_dir="${ROOT_FOLDER}/apps/${app}"
app_data_dir="${ROOT_FOLDER}/app-data/${app}"
@ -67,6 +68,11 @@ else
echo "Error: \"${app}\" is not a valid app"
exit 1
fi
if [[ -z "${root_folder_host}" ]]; then
echo "Error: Root folder not provided"
exit 1
fi
fi
if [ -z ${3+x} ]; then
@ -98,9 +104,9 @@ compose() {
local app_dir="${ROOT_FOLDER}/apps/${app}"
# Vars to use in compose file
export APP_DATA_DIR="${app_data_dir}"
export APP_DATA_DIR="${root_folder_host}/app-data/${app}"
export APP_DIR="${app_dir}"
export ROOT_FOLDER_HOST="${root_folder_host}"
export ROOT_FOLDER="${ROOT_FOLDER}"
# Docker-compose does not support multiple env files
@ -123,6 +129,11 @@ if [[ "$command" = "install" ]]; then
cp -r "${ROOT_FOLDER}/apps/${app}/data" "${app_data_dir}/data"
fi
# Remove all .gitkeep files from app data dir
find "${app_data_dir}" -name ".gitkeep" -exec rm -f {} \;
chown -R "1000:1000" "${app_data_dir}"
compose "${app}" up -d
exit
fi
@ -130,11 +141,12 @@ fi
# Removes images and destroys all data for an app
if [[ "$command" = "uninstall" ]]; then
echo "Removing images for app ${app}..."
compose "${app}" down --remove-orphans
# compose "${app}" down --remove-orphans
echo "Deleting app data for app ${app}..."
if [[ -d "${app_data_dir}" ]]; then
sudo rm -rf "${app_data_dir}"
rm -rf "${app_data_dir}"
fi
echo "Successfully uninstalled app ${app}"
@ -145,6 +157,7 @@ fi
if [[ "$command" = "stop" ]]; then
echo "Stopping app ${app}..."
compose "${app}" down --remove-orphans --rmi all
compose "${app}" rm --force --stop
exit
@ -153,6 +166,8 @@ fi
# Starts an installed app
if [[ "$command" = "start" ]]; then
echo "Starting app ${app}..."
compose "${app}" pull
compose "${app}" up --detach
exit


@ -15,17 +15,49 @@ echo "=============== TIPI ================="
echo "======================================"
echo
# Install ansible if not installed
if ! command -v ansible-playbook > /dev/null; then
echo "Installing Ansible..."
sudo apt-get update
sudo apt-get install python3 python3-pip -y
sudo pip3 install ansible
# Enable passwordless sudo for $USERNAME
if ! grep -q "${USERNAME} ALL=(ALL) NOPASSWD: ALL" /etc/sudoers; then
echo "${USERNAME} ALL=(ALL) NOPASSWD: ALL" | sudo tee -a /etc/sudoers
fi
echo "Running ansible playbook setup.yml"
sudo apt-get upgrade
sudo apt-get install -y jq coreutils ca-certificates curl gnupg lsb-release
ansible-playbook ansible/setup.yml -i ansible/hosts -e username="$USERNAME"
LSB="$(lsb_release -is)"
# Add docker gpg key (Debian)
if [[ "${LSB}" == "Debian" ]]; then
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
fi
# Add docker gpg key (Ubuntu)
if [[ "${LSB}" == "Ubuntu" ]]; then
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
fi
# Add deb repo for docker (Debian)
if [[ "${LSB}" == "Debian" ]]; then
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
fi
# Add deb repo for docker (Ubuntu)
if [[ "${LSB}" == "Ubuntu" ]]; then
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
fi
sudo apt-get upgrade
# Install docker compose if not here
if ! command -v docker-compose > /dev/null; then
sudo curl -L "https://github.com/docker/compose/releases/download/v2.3.4/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
fi
# create docker group
if ! getent group docker > /dev/null; then
sudo groupadd docker
fi
sudo usermod -aG docker "${USERNAME}"
# echo "Configuring permissions..."
# echo


@ -14,6 +14,11 @@ SED_ROOT_FOLDER="$(echo $ROOT_FOLDER | sed 's/\//\\\//g')"
INTERNAL_IP="$(hostname -I | awk '{print $1}')"
DNS_IP=9.9.9.9 # Default to Quad9 DNS
USERNAME="$(id -nu 1000)"
ARCHITECTURE="$(uname -m)"
if [[ "$architecture" == "aarch64" ]]; then
ARCHITECTURE="arm64"
fi
if [[ $UID != 0 ]]; then
echo "Tipi must be started as root"
@ -90,20 +95,14 @@ echo "Generating config files..."
[[ -f "${ROOT_FOLDER}/packages/system-api/.env" ]] && rm -f "${ROOT_FOLDER}/packages/system-api/.env"
# Store paths to intermediary config files
ENV_FILE="$ROOT_FOLDER/templates/.env"
ENV_FILE_SYSTEM_API="$ROOT_FOLDER/templates/.env-api"
# Remove intermediary config files
[[ -f "$ENV_FILE" ]] && rm -f "$ENV_FILE"
[[ -f "$ENV_FILE_SYSTEM_API" ]] && rm -f "$ENV_FILE_SYSTEM_API"
ENV_FILE=$(mktemp)
# Copy template configs to intermediary configs
[[ -f "$ROOT_FOLDER/templates/env-sample" ]] && cp "$ROOT_FOLDER/templates/env-sample" "$ENV_FILE"
[[ -f "$ROOT_FOLDER/templates/env-api-sample" ]] && cp "$ROOT_FOLDER/templates/env-api-sample" "$ENV_FILE_SYSTEM_API"
JWT_SECRET=$(derive_entropy "jwt")
for template in "${ENV_FILE}" "${ENV_FILE_SYSTEM_API}"; do
for template in "${ENV_FILE}"; do
sed -i "s/<dns_ip>/${DNS_IP}/g" "${template}"
sed -i "s/<internal_ip>/${INTERNAL_IP}/g" "${template}"
sed -i "s/<puid>/${PUID}/g" "${template}"
@ -111,26 +110,36 @@ for template in "${ENV_FILE}" "${ENV_FILE_SYSTEM_API}"; do
sed -i "s/<tz>/${TZ}/g" "${template}"
sed -i "s/<jwt_secret>/${JWT_SECRET}/g" "${template}"
sed -i "s/<root_folder>/${SED_ROOT_FOLDER}/g" "${template}"
sed -i "s/<tipi_version>/$(cat "${ROOT_FOLDER}/VERSION")/g" "${template}"
sed -i "s/<architecture>/${ARCHITECTURE}/g" "${template}"
done
mv -f "$ENV_FILE" "$ROOT_FOLDER/.env"
mv -f "$ENV_FILE_SYSTEM_API" "$ROOT_FOLDER/packages/system-api/.env"
ansible-playbook ansible/start.yml -i ansible/hosts -e username="$USERNAME"
# Run system-info.sh
echo "Running system-info.sh..."
bash "${ROOT_FOLDER}/scripts/system-info.sh"
# ansible-playbook ansible/start.yml -i ansible/hosts -K -e username="$USERNAME"
docker-compose --env-file "${ROOT_FOLDER}/.env" pull
# Run docker-compose
docker-compose --env-file "${ROOT_FOLDER}/.env" up --detach --remove-orphans --build || {
echo "Failed to start containers"
exit 1
}
str=$(get_json_field ${STATE_FOLDER}/apps.json installed)
apps_to_start=($str)
# str=$(get_json_field ${STATE_FOLDER}/apps.json installed)
# apps_to_start=($str)
# for app in "${apps_to_start[@]}"; do
# "${ROOT_FOLDER}/scripts/app.sh" start $app
# done
# Give permissions 1000:1000 to app data
chown -R 1000:1000 "${ROOT_FOLDER}/app-data"
echo "Tipi is now running"
echo ""
cat << "EOF"

scripts/system-info.sh (new executable file, +25)

@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -e # Exit immediately if a command exits with a non-zero status.
ROOT_FOLDER="$(readlink -f $(dirname "${BASH_SOURCE[0]}")/..)"
STATE_FOLDER="${ROOT_FOLDER}/state"
# Available disk space
TOTAL_DISK_SPACE_BYTES=$(df -P -B 1 / | tail -n 1 | awk '{print $2}')
AVAILABLE_DISK_SPACE_BYTES=$(df -P -B 1 / | tail -n 1 | awk '{print $4}')
USED_DISK_SPACE_BYTES=$(($TOTAL_DISK_SPACE_BYTES - $AVAILABLE_DISK_SPACE_BYTES))
# CPU info
CPU_LOAD_PERCENTAGE=$(top -bn1 | grep "Cpu(s)" | sed "s/.*, *\([0-9.]*\)%* id.*/\1/" | awk '{print 100 - $1}')
# Memory info
MEM_TOTAL_BYTES=$(free -b | grep Mem | awk '{print $2}')
MEM_AVAILABLE_BYTES=$(free -b | grep Mem | awk '{print $7}')
MEM_USED_BYTES=$(($MEM_TOTAL_BYTES - $MEM_AVAILABLE_BYTES))
# Create temporary json file
TEMP_JSON_FILE=$(mktemp)
echo '{ "cpu": { "load": '"${CPU_LOAD_PERCENTAGE}"' }, "memory": { "total": '"${MEM_TOTAL_BYTES}"' , "used": '"${MEM_USED_BYTES}"', "available": '"${MEM_AVAILABLE_BYTES}"' }, "disk": { "total": '"${TOTAL_DISK_SPACE_BYTES}"' , "used": '"${USED_DISK_SPACE_BYTES}"', "available": '"${AVAILABLE_DISK_SPACE_BYTES}"' } }' > "${TEMP_JSON_FILE}"
# Write to state file
echo "$(cat "${TEMP_JSON_FILE}")" > "${STATE_FOLDER}/system-info.json"


@ -1,4 +0,0 @@
ROOT_FOLDER=<root_folder>
JWT_SECRET=<jwt_secret>
INTERNAL_IP=<internal_ip>
ARCHITECTURE=<architecture>


@ -7,3 +7,6 @@ PGID=<pgid>
INTERNAL_IP=<internal_ip>
DNS_IP=<dns_ip>
ARCHITECTURE=<architecture>
TIPI_VERSION=<tipi_version>
JWT_SECRET=<jwt_secret>
ROOT_FOLDER_HOST=<root_folder>