Compare commits

Comparing `fix/epipe-…` with `develop` — 571 commits.
444 changed files with 9300 additions and 9992 deletions
```diff
@@ -315,7 +315,9 @@
       "avatar_url": "https://avatars.githubusercontent.com/u/106091011?v=4",
       "profile": "https://github.com/steveiliop56",
       "contributions": [
-        "translation"
+        "translation",
+        "code",
+        "test"
       ]
     },
     {
@@ -353,11 +355,56 @@
       "contributions": [
         "translation"
       ]
+    },
+    {
+      "login": "itsrllyhim",
+      "name": "him",
+      "avatar_url": "https://avatars.githubusercontent.com/u/143047010?v=4",
+      "profile": "https://github.com/itsrllyhim",
+      "contributions": [
+        "code"
+      ]
+    },
+    {
+      "login": "cchalop1",
+      "name": "CHALOPIN Clément",
+      "avatar_url": "https://avatars.githubusercontent.com/u/28163855?v=4",
+      "profile": "http://cchalop1.com",
+      "contributions": [
+        "code"
+      ]
+    },
+    {
+      "login": "geetansh",
+      "name": "Geetansh Jindal",
+      "avatar_url": "https://avatars.githubusercontent.com/u/9976198?v=4",
+      "profile": "https://github.com/geetansh",
+      "contributions": [
+        "code"
+      ]
+    },
+    {
+      "login": "0livier",
+      "name": "Olivier Garcia",
+      "avatar_url": "https://avatars.githubusercontent.com/u/10607?v=4",
+      "profile": "https://github.com/0livier",
+      "contributions": [
+        "code"
+      ]
+    },
+    {
+      "login": "qcoudeyr",
+      "name": "qcoudeyr",
+      "avatar_url": "https://avatars.githubusercontent.com/u/124463277?v=4",
+      "profile": "https://github.com/qcoudeyr",
+      "contributions": [
+        "code"
+      ]
     }
   ],
   "contributorsPerLine": 7,
   "projectName": "runtipi",
-  "projectOwner": "meienberger",
+  "projectOwner": "runtipi",
   "repoType": "github",
   "repoHost": "https://github.com",
   "skipCi": true,
```
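The hunks above appear to come from an all-contributors configuration file — the `projectName`/`projectOwner`/`contributorsPerLine` keys match that tool's format, although the file path itself is not preserved in this extract. Assuming that is the case, entries like the ones added here are usually generated with the all-contributors CLI rather than written by hand. A minimal sketch (the username is taken from the diff; the commands are illustrative and not part of the change set):

```bash
# Add a contributor entry for "code" contributions, then regenerate the
# contributors table in the README.
npx all-contributors add cchalop1 code
npx all-contributors generate
```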
```diff
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 echo '{
-  "appsRepoUrl": "https://github.com/meienberger/runtipi-appstore.git/"
+  "appsRepoUrl": "https://github.com/runtipi/runtipi-appstore.git/"
 }' > state/settings.json
 npm i -g pnpm
 pnpm i
```
```diff
@@ -1,13 +1,13 @@
 APPS_REPO_ID=7a92c8307e0a8074763c80be1fcfa4f87da6641daea9211aea6743b0116aba3b
-APPS_REPO_URL=https://github.com/meienberger/runtipi-appstore
+APPS_REPO_URL=https://github.com/runtipi/runtipi-appstore
 TZ=Etc/UTC
 INTERNAL_IP=localhost
 DNS_IP=9.9.9.9
-ARCHITECTURE=arm64 # arm64 or amd64
+ARCHITECTURE=arm64
 TIPI_VERSION=1.5.2
 JWT_SECRET=secret
-ROOT_FOLDER_HOST=/path/to/runtipi # absolute path to the root folder of the runtipi installation
-STORAGE_PATH=/path/to/runtipi # absolute path to the root folder of the runtipi installation
+ROOT_FOLDER_HOST=/path/to/runtipi
+STORAGE_PATH=/path/to/runtipi
 NGINX_PORT=7000
 NGINX_PORT_SSL=443
 DOMAIN=tipi.localhost
```
```diff
@@ -1,5 +1,5 @@
 module.exports = {
-  plugins: ['@typescript-eslint', 'import', 'react', 'jest', 'jsdoc', 'jsx-a11y', 'testing-library', 'jest-dom'],
+  plugins: ['@typescript-eslint', 'import', 'react', 'jest', 'jsx-a11y', 'testing-library', 'jest-dom'],
   extends: [
     'plugin:@typescript-eslint/recommended',
     'next/core-web-vitals',
@@ -10,7 +10,6 @@ module.exports = {
     'plugin:import/typescript',
     'prettier',
     'plugin:react/recommended',
-    'plugin:jsdoc/recommended',
     'plugin:jsx-a11y/recommended',
   ],
   parser: '@typescript-eslint/parser',
@@ -53,8 +52,6 @@ module.exports = {
     'no-underscore-dangle': 0,
     'arrow-body-style': 0,
     'class-methods-use-this': 0,
-    'jsdoc/require-returns': 0,
-    'jsdoc/tag-lines': 0,
     'import/extensions': [
       'error',
       'ignorePackages',
```
.github/ISSUE_TEMPLATE/bug_report.md — 11 changes

```diff
@@ -11,7 +11,7 @@ assignees: meienberger
 Before opening your issue be sure to have completed all those tasks.
 - [ ] I have searched for an already existing issue with similar context and errors. My issue has not yet been reported.
 - [ ] I have included a clear description and steps to reproduce.
-- [ ] I have included my OS information
+- [ ] I have included logs from the file `runtipi/logs/error.log` if relevant

 **Describe the bug**
 A clear and concise description of what the bug is.
@@ -29,11 +29,10 @@ A clear and concise description of what you expected to happen.
 **Screenshots**
 If applicable, add screenshots to help explain your problem.

-**Desktop (please complete the following information):**
-- OS: [e.g. iOS]
-- Browser [e.g. chrome, safari]
-- Version [e.g. 22]
+**Server (please complete the following information):**
+- OS: [e.g. Ubuntu 20.04]
+- Tipi Version [e.g. 2.0.5] (can be found in settings page)

 **Additional context**
-Add any other context about the problem here. Like results of the `start` script or container logs
+Please include logs here `runtipi/logs/error.log` and add any other context about the problem here. Like results of the `start` script or container logs `docker logs ...`
```
.github/workflows/alpha-release.yml — 122 changes

```diff
@@ -11,65 +11,95 @@ jobs:
   create-tag:
     runs-on: ubuntu-latest
     outputs:
-      tagname: ${{ steps.create_tag.outputs.tagname }}
+      tagname: ${{ steps.get_tag.outputs.tagname }}

     steps:
      - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Create Tag
-        id: create_tag
-        uses: butlerlogic/action-autotag@stable
-        env:
-          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
+      - name: Get tag from package.json
+        id: get_tag
+        run: |
+          VERSION=$(npm run version --silent)
+          echo "tagname=v${VERSION}-alpha.${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT

+      - uses: rickstaa/action-create-tag@v1
         with:
-          tag_prefix: 'v'
-          tag_suffix: '-alpha.${{ github.event.inputs.tag }}'
+          tag: ${{ steps.get_tag.outputs.tagname }}
+
+  build-worker:
+    runs-on: ubuntu-latest
+    needs: create-tag
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build and push images
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./packages/worker/Dockerfile
+          platforms: linux/amd64
+          push: true
+          tags: ghcr.io/${{ github.repository_owner }}/worker:${{ needs.create-tag.outputs.tagname }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/worker:buildcache
+          cache-to: type=registry,ref=ghcr.io/${{ github.repository_owner }}/worker:buildcache,mode=max

   build-images:
     runs-on: ubuntu-latest
     needs: create-tag

     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3

-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build and push images
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           context: .
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64
           push: true
-          tags: meienberger/runtipi:${{ needs.create-tag.outputs.tagname }}
-          cache-from: type=registry,ref=meienberger/runtipi:buildcache
-          cache-to: type=registry,ref=meienberger/runtipi:buildcache,mode=max
+          tags: ghcr.io/${{ github.repository_owner }}/runtipi:${{ needs.create-tag.outputs.tagname }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/runtipi:buildcache
+          cache-to: type=registry,ref=ghcr.io/${{ github.repository_owner }}/runtipi:buildcache,mode=max

   build-cli:
     runs-on: ubuntu-latest
     needs: create-tag

     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Install Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         name: Install pnpm
         id: pnpm-install
         with:
@@ -99,51 +129,37 @@
         run: pnpm -r --filter cli package

       - name: Upload CLI
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: cli
           path: packages/cli/dist

   publish-release:
     runs-on: ubuntu-latest
-    needs: [create-tag, build-images, build-cli]
+    needs: [create-tag, build-images, build-cli, build-worker]

     steps:
       - name: Download CLI
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: cli
           path: cli

+      - name: Rename CLI
+        run: |
+          mv cli/bin/cli-x64 ./runtipi-cli-linux-x64
+
       - name: Create alpha release
         id: create_release
-        uses: actions/create-release@v1
+        uses: softprops/action-gh-release@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           body: |
             **${{ needs.create-tag.outputs.tagname }}**
           tag_name: ${{ needs.create-tag.outputs.tagname }}
-          release_name: ${{ needs.create-tag.outputs.tagname }}
+          name: ${{ needs.create-tag.outputs.tagname }}
           draft: false
           prerelease: true
-
-      - name: Upload X64 Linux CLI binary to release
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: cli/bin/cli-x64
-          asset_name: runtipi-cli-linux-x64
-          asset_content_type: application/octet-stream
-
-      - name: Upload ARM64 Linux CLI binary to release
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: cli/bin/cli-arm64
-          asset_name: runtipi-cli-linux-arm64
-          asset_content_type: application/octet-stream
+          files: |
+            runtipi-cli-linux-x64
```
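A rough local equivalent of the new "Get tag from package.json" step, for anyone tracing how the alpha tag name is produced. This assumes the repository's package.json defines a `version` script that prints the current version (the script itself is not visible in this diff), and the input value stands in for `github.event.inputs.tag`:

```bash
# Sketch only — mirrors the workflow step under the assumption described above.
VERSION=$(npm run version --silent)   # e.g. "1.5.2"
INPUT_TAG=1                           # stand-in for github.event.inputs.tag
echo "tagname=v${VERSION}-alpha.${INPUT_TAG}"
# In the workflow this line is appended to "$GITHUB_OUTPUT" instead of printed,
# so downstream jobs can read it as needs.create-tag.outputs.tagname.
```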
.github/workflows/beta-release.yml — 144 changes

```diff
@@ -8,67 +8,98 @@ on:
         required: true

 jobs:
-  get-tag:
+  create-tag:
     runs-on: ubuntu-latest
     outputs:
-      tag: ${{ steps.get_tag.outputs.tag }}
+      tagname: ${{ steps.get_tag.outputs.tagname }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Install Node.js
-        uses: actions/setup-node@v3
-        with:
-          node-version: 18
-
-      - name: Get tag from VERSION file
+      - name: Get tag from package.json
         id: get_tag
         run: |
           VERSION=$(npm run version --silent)
-          echo "tag=v${VERSION}-beta.${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
+          echo "tagname=v${VERSION}-beta.${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT

+      - uses: rickstaa/action-create-tag@v1
+        with:
+          tag: ${{ steps.get_tag.outputs.tagname }}
+
+  build-worker:
+    runs-on: ubuntu-latest
+    needs: create-tag
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build and push images
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./packages/worker/Dockerfile
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: ghcr.io/${{ github.repository_owner }}/worker:${{ needs.create-tag.outputs.tagname }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/worker:buildcache
+          cache-to: type=registry,ref=ghcr.io/${{ github.repository_owner }}/worker:buildcache,mode=max
+
   build-images:
-    needs: get-tag
+    needs: create-tag
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3

-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build and push images
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           context: .
           platforms: linux/amd64,linux/arm64
           push: true
-          tags: meienberger/runtipi:${{ needs.get-tag.outputs.tag }}
-          cache-from: type=registry,ref=meienberger/runtipi:buildcache
-          cache-to: type=registry,ref=meienberger/runtipi:buildcache,mode=max
+          tags: ghcr.io/${{ github.repository_owner }}/runtipi:${{ needs.create-tag.outputs.tagname }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/runtipi:buildcache
+          cache-to: type=registry,ref=ghcr.io/${{ github.repository_owner }}/runtipi:buildcache,mode=max

   build-cli:
     runs-on: ubuntu-latest
-    needs: get-tag
+    needs: create-tag
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Install Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         name: Install pnpm
         id: pnpm-install
         with:
@@ -92,80 +123,49 @@ jobs:
         run: pnpm install

       - name: Set version
-        run: pnpm -r --filter cli set-version ${{ needs.get-tag.outputs.tag }}
+        run: pnpm -r --filter cli set-version ${{ needs.create-tag.outputs.tagname }}

       - name: Build CLI
         run: pnpm -r --filter cli package

       - name: Upload CLI
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: cli
           path: packages/cli/dist

-  create-tag:
-    needs: [build-images, build-cli]
-    runs-on: ubuntu-latest
-    outputs:
-      tagname: ${{ steps.create_tag.outputs.tagname }}
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-
-      - name: Create Tag
-        id: create_tag
-        uses: butlerlogic/action-autotag@stable
-        env:
-          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
-        with:
-          tag_prefix: 'v'
-          tag_suffix: '-beta.${{ github.event.inputs.tag }}'
-
   publish-release:
     runs-on: ubuntu-latest
-    needs: [create-tag, build-images, build-cli]
+    needs: [create-tag, build-images, build-cli, build-worker]
     outputs:
       id: ${{ steps.create_release.outputs.id }}
     steps:
       - name: Download CLI
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: cli
           path: cli

+      - name: Rename CLI
+        run: |
+          mv cli/bin/cli-x64 ./runtipi-cli-linux-x64
+          mv cli/bin/cli-arm64 ./runtipi-cli-linux-arm64
+
       - name: Create beta release
         id: create_release
-        uses: actions/create-release@v1
+        uses: softprops/action-gh-release@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           body: |
             **${{ needs.create-tag.outputs.tagname }}**
           tag_name: ${{ needs.create-tag.outputs.tagname }}
-          release_name: ${{ needs.create-tag.outputs.tagname }}
+          name: ${{ needs.create-tag.outputs.tagname }}
           draft: false
           prerelease: true
-
-      - name: Upload X64 Linux CLI binary to release
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: cli/bin/cli-x64
-          asset_name: runtipi-cli-linux-x64
-          asset_content_type: application/octet-stream
-
-      - name: Upload ARM64 Linux CLI binary to release
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: cli/bin/cli-arm64
-          asset_name: runtipi-cli-linux-arm64
-          asset_content_type: application/octet-stream
+          files: |
+            runtipi-cli-linux-x64
+            runtipi-cli-linux-arm64

   e2e-tests:
     needs: [create-tag, publish-release]
```
.github/workflows/ci.yml — 20 changes

```diff
@@ -1,6 +1,6 @@
 name: Tipi CI
 on:
-  push:
+  pull_request:

 env:
   ROOT_FOLDER: /runtipi
@@ -38,14 +38,14 @@
           --health-retries 5
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Install Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         name: Install pnpm
         id: pnpm-install
         with:
@@ -76,7 +76,7 @@

       - name: Get number of CPU cores
         id: cpu-cores
-        uses: SimenB/github-actions-cpu-cores@v1
+        uses: SimenB/github-actions-cpu-cores@v2

       - name: Run tests
         run: pnpm run test --max-workers ${{ steps.cpu-cores.outputs.count }}
@@ -101,14 +101,14 @@
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Install Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         name: Install pnpm
         id: pnpm-install
         with:
```
.github/workflows/dependency-review.yml — 2 changes

```diff
@@ -15,6 +15,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: 'Checkout Repository'
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: 'Dependency Review'
         uses: actions/dependency-review-action@v3
```
.github/workflows/e2e.yml — 20 changes

```diff
@@ -29,7 +29,7 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Install SSH key
         uses: shimataro/ssh-key-action@v2
@@ -72,9 +72,6 @@
         run: |
           while ! ssh -o StrictHostKeyChecking=no -i ~/.ssh/id_rsa root@${{ steps.get-droplet-ip.outputs.droplet_ip }} "echo 'SSH is ready'"; do sleep 5; done

-      - name: Wait 1 minute for Droplet to be ready
-        run: sleep 60
-
       - name: Create docker group on Droplet
         uses: fifsky/ssh-action@master
         with:
@@ -85,12 +82,15 @@
           user: root
           key: ${{ secrets.SSH_KEY }}

+      - name: Wait 90 seconds for Docker to be ready on Droplet
+        run: sleep 90
+
       - name: Deploy app to Droplet
         uses: fifsky/ssh-action@master
         with:
           command: |
             echo 'Downloading install script from GitHub'
-            curl -s https://raw.githubusercontent.com/meienberger/runtipi/${{ inputs.version }}/scripts/install.sh > install.sh
+            curl -s https://raw.githubusercontent.com/runtipi/runtipi/${{ inputs.version }}/scripts/install.sh > install.sh
             chmod +x install.sh
             echo 'Running install script'
             ./install.sh --version ${{ inputs.version }}
@@ -110,9 +110,9 @@
     runs-on: ubuntu-latest
     needs: [deploy]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
        name: Install pnpm
        id: pnpm-install
        with:
@@ -132,9 +132,9 @@
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
        with:
-          node-version: 18
+          node-version: 20

       - name: Create .env.e2e file with Droplet IP
         run: |
@@ -181,7 +181,7 @@
         uses: actions/configure-pages@v3

       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v1
+        uses: actions/upload-pages-artifact@v2
         with:
           path: playwright-report/
```
.github/workflows/release-candidate.yml — deleted (41 lines)

```diff
@@ -1,41 +0,0 @@
-name: Release candidate
-
-on:
-  workflow_dispatch:
-
-jobs:
-  # Build images and publish RCs to DockerHub
-  build-images:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Get tag from VERSION file
-        id: meta
-        run: |
-          VERSION=$(npm run version --silent)
-          TAG=${VERSION}
-          echo "tag=${TAG}" >> $GITHUB_OUTPUT
-
-      - name: Build and push images
-        uses: docker/build-push-action@v4
-        with:
-          context: .
-          platforms: linux/amd64,linux/arm64
-          push: true
-          tags: meienberger/runtipi:rc-${{ steps.meta.outputs.TAG }}
-          cache-from: type=registry,ref=meienberger/runtipi:buildcache
-          cache-to: type=registry,ref=meienberger/runtipi:buildcache,mode=max
```
.github/workflows/release.yml — 149 changes

```diff
@@ -3,68 +3,101 @@ on:
   workflow_dispatch:

 jobs:
-  get-tag:
+  create-tag:
     runs-on: ubuntu-latest
+    needs: [build-images, build-cli]
     outputs:
-      tag: ${{ steps.get_tag.outputs.tag }}
+      tagname: ${{ steps.get_tag.outputs.tagname }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Install Node.js
-        uses: actions/setup-node@v3
-        with:
-          node-version: 18
-
-      - name: Get tag from VERSION file
+      - name: Get tag from package.json
         id: get_tag
         run: |
           VERSION=$(npm run version --silent)
-          echo "tag=v${VERSION}" >> $GITHUB_OUTPUT
+          echo "tagname=v${VERSION}" >> $GITHUB_OUTPUT

+      - uses: rickstaa/action-create-tag@v1
+        with:
+          tag: ${{ steps.get_tag.outputs.tagname }}
+
   build-images:
-    if: github.repository == 'meienberger/runtipi'
-    needs: get-tag
+    if: github.repository == 'runtipi/runtipi'
+    needs: create-tag
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3

-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build and push images
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           context: .
           platforms: linux/amd64,linux/arm64
           push: true
-          tags: meienberger/runtipi:latest,meienberger/runtipi:${{ needs.get-tag.outputs.tag }}
-          cache-from: type=registry,ref=meienberger/runtipi:buildcache
-          cache-to: type=registry,ref=meienberger/runtipi:buildcache,mode=max
+          tags: ghcr.io/${{ github.repository_owner }}/runtipi:${{ needs.create-tag.outputs.tagname }},ghcr.io/${{ github.repository_owner }}/runtipi:latest
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/runtipi:buildcache
+          cache-to: type=registry,ref=ghcr.io/${{ github.repository_owner }}/runtipi:buildcache,mode=max
+
+  build-worker:
+    runs-on: ubuntu-latest
+    needs: create-tag
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build and push images
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./packages/worker/Dockerfile
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: ghcr.io/${{ github.repository_owner }}/worker:${{ needs.create-tag.outputs.tagname }},ghcr.io/${{ github.repository_owner }}/worker:latest
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/worker:buildcache
+          cache-to: type=registry,ref=ghcr.io/${{ github.repository_owner }}/worker:buildcache,mode=max

   build-cli:
     runs-on: ubuntu-latest
-    needs: get-tag
+    timeout-minutes: 10
+    needs: create-tag
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Install Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20

-      - uses: pnpm/action-setup@v2.2.4
+      - uses: pnpm/action-setup@v2.4.0
         name: Install pnpm
         id: pnpm-install
         with:
@@ -88,75 +121,49 @@
         run: pnpm install

       - name: Set version
-        run: pnpm -r --filter cli set-version ${{ needs.get-tag.outputs.tag }}
+        run: pnpm -r --filter cli set-version ${{ needs.create-tag.outputs.tagname }}

       - name: Build CLI
         run: pnpm -r --filter cli package

       - name: Upload CLI
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: cli
           path: packages/cli/dist

-  create-tag:
-    runs-on: ubuntu-latest
-    needs: [build-images, build-cli]
-    outputs:
-      tagname: ${{ steps.create_tag.outputs.tagname }}
-    steps:
-      - name: Create Tag
-        id: create_tag
-        uses: butlerlogic/action-autotag@stable
-        env:
-          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
-        with:
-          tag_prefix: 'v'
-
   publish-release:
     runs-on: ubuntu-latest
-    needs: [create-tag]
+    needs: [create-tag, build-images, build-worker, build-cli]
     outputs:
       id: ${{ steps.create_release.outputs.id }}
     steps:
       - name: Download CLI
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v3
         with:
           name: cli
           path: cli

-      - name: Create beta release
+      - name: Rename CLI
+        run: |
+          mv cli/bin/cli-x64 ./runtipi-cli-linux-x64
+          mv cli/bin/cli-arm64 ./runtipi-cli-linux-arm64
+
+      - name: Create release
         id: create_release
-        uses: actions/create-release@v1
+        uses: softprops/action-gh-release@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
           body: |
             **${{ needs.create-tag.outputs.tagname }}**
           tag_name: ${{ needs.create-tag.outputs.tagname }}
-          release_name: ${{ needs.create-tag.outputs.tagname }}
+          name: ${{ needs.create-tag.outputs.tagname }}
           draft: false
           prerelease: true
-
-      - name: Upload X64 Linux CLI binary to release
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: cli/bin/cli-x64
-          asset_name: runtipi-cli-linux-x64
-          asset_content_type: application/octet-stream
-
-      - name: Upload ARM64 Linux CLI binary to release
-        uses: actions/upload-release-asset@v1
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: cli/bin/cli-arm64
-          asset_name: runtipi-cli-linux-arm64
-          asset_content_type: application/octet-stream
+          files: |
+            runtipi-cli-linux-x64
+            runtipi-cli-linux-arm64

   e2e-tests:
     needs: [create-tag, publish-release]
@@ -171,12 +178,12 @@
     runs-on: ubuntu-latest
     steps:
       - name: Promote release
-        uses: actions/github-script@v4
+        uses: actions/github-script@v7
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
             const id = '${{ needs.publish-release.outputs.id }}';
-            github.repos.updateRelease({
+            github.rest.repos.updateRelease({
               owner: context.repo.owner,
               repo: context.repo.repo,
               release_id: id,
```
.gitignore — 4 changes

```diff
@@ -54,8 +54,7 @@ node_modules/
 /data/
 /repos/
 /apps/
-traefik/shared
-traefik/tls
+/traefik/

 # media folder
 media
@@ -67,3 +66,4 @@ media
 temp

 ./traefik/
+/user-config/
```
```diff
@@ -1,4 +1,4 @@
-ARG NODE_VERSION="18.16"
+ARG NODE_VERSION="20.10"
 ARG ALPINE_VERSION="3.18"

 FROM node:${NODE_VERSION}-alpine${ALPINE_VERSION} AS node_base
@@ -14,6 +14,7 @@ WORKDIR /app

 COPY ./pnpm-lock.yaml ./
 COPY ./pnpm-workspace.yaml ./
+COPY ./patches ./patches
 RUN pnpm fetch --no-scripts

 COPY ./package*.json ./
@@ -32,7 +33,8 @@ RUN npm run build
 FROM node_base AS app

 ENV NODE_ENV production
-# USER node
+
+USER node

 WORKDIR /app
```
```diff
@@ -1,4 +1,4 @@
-ARG NODE_VERSION="18.16"
+ARG NODE_VERSION="20.10"
 ARG ALPINE_VERSION="3.18"

 FROM node:${NODE_VERSION}-alpine${ALPINE_VERSION}
@@ -8,6 +8,7 @@ RUN npm install pnpm -g
 WORKDIR /app

 COPY ./pnpm-lock.yaml ./
+COPY ./patches ./patches
 RUN pnpm fetch --ignore-scripts

 COPY ./package*.json ./
```
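Both Dockerfiles now copy a `patches` directory into the image before running `pnpm fetch`, which suggests pnpm's dependency-patching workflow is in use — an inference, since the diff only shows the directory being copied, not how it is produced. A sketch of how such a patch file is typically created (the package name and temporary path are placeholders, not values taken from this repository):

```bash
# Extract a dependency into a temporary directory for editing.
pnpm patch some-dependency@1.0.0
# ...edit the files in the directory pnpm printed, then persist the changes:
pnpm patch-commit /tmp/some-temp-dir
# This writes patches/some-dependency@1.0.0.patch and records it under
# "pnpm.patchedDependencies" in package.json, which is presumably why the
# directory is copied into the image before "pnpm fetch" runs.
```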
72  README.md

@@ -1,28 +1,29 @@
# Tipi — A personal homeserver for everyone

<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
[](#contributors-)
<!-- ALL-CONTRIBUTORS-BADGE:END -->

-[](https://github.com/meienberger/runtipi/blob/master/LICENSE)
+[](https://github.com/runtipi/runtipi/blob/master/LICENSE)
-[](https://github.com/meienberger/runtipi/releases)
+[](https://github.com/runtipi/runtipi/releases)

[](https://hub.docker.com/r/meienberger/runtipi/)
[](https://hub.docker.com/r/meienberger/runtipi/)

-[](https://codecov.io/gh/meienberger/runtipi)
[](https://crowdin.com/project/runtipi)

+> 💡 Tipi is built with TypeScript, Next.js app router and Drizzle ORM! If you want to collaborate on a cool project, join the discussion on Discord!

#### Join the discussion

[](https://discord.gg/Bu9qEPnHsc)
[](https://matrix.to/#/#runtipi:matrix.org)

> ⚠️ Tipi is still at an early stage of development and issues are to be expected. Feel free to open an issue or pull request if you find a bug.

-Tipi is a personal homeserver orchestrator that makes it easy to manage and run multiple services on a single server. It is based on Docker and comes with a simple web interface to manage your services. Tipi is designed to be easy to use, so you don't have to worry about manual configuration or networking. Simply install Tipi on your server and use the web interface to add and manage services. You can see a list of available services in the [App Store repo](https://github.com/meienberger/runtipi-appstore) and request new ones if you don't see what you need. To get started, follow the installation instructions below.
+Tipi is a personal homeserver orchestrator that makes it easy to manage and run multiple services on a single server. It is based on Docker and comes with a simple web interface to manage your services. Tipi is designed to be easy to use, so you don't have to worry about manual configuration or networking. Simply install Tipi on your server and use the web interface to add and manage services. You can see a list of available services in the [App Store repo](https://github.com/runtipi/runtipi-appstore) and request new ones if you don't see what you need. To get started, follow the installation instructions below.

## Getting started

@@ -51,7 +52,7 @@ We are looking for contributions of all kinds. If you know design, development,

## 📜 License

-[](https://github.com/meienberger/runtipi/blob/master/LICENSE)
+[](https://github.com/runtipi/runtipi/blob/master/LICENSE)

Tipi is licensed under the GNU General Public License v3.0. TL;DR — You may copy, distribute and modify the software as long as you track changes/dates in source files. Any modifications to or software including (via compiler) GPL-licensed code must also be made available under the GPL along with build & install instructions.

@@ -72,36 +73,36 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
<table>
  <tbody>
    <tr>
-      <td align="center" valign="top" width="14.28%"><a href="https://meienberger.dev/"><img src="https://avatars.githubusercontent.com/u/47644445?v=4?s=100" width="100px;" alt="Nicolas Meienberger"/><br /><sub><b>Nicolas Meienberger</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=meienberger" title="Code">💻</a> <a href="#infra-meienberger" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/meienberger/runtipi/commits?author=meienberger" title="Tests">⚠️</a> <a href="https://github.com/meienberger/runtipi/commits?author=meienberger" title="Documentation">📖</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://meienberger.dev/"><img src="https://avatars.githubusercontent.com/u/47644445?v=4?s=100" width="100px;" alt="Nicolas Meienberger"/><br /><sub><b>Nicolas Meienberger</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=meienberger" title="Code">💻</a> <a href="#infra-meienberger" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/runtipi/runtipi/commits?author=meienberger" title="Tests">⚠️</a> <a href="https://github.com/runtipi/runtipi/commits?author=meienberger" title="Documentation">📖</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/ArneNaessens"><img src="https://avatars.githubusercontent.com/u/16622722?v=4?s=100" width="100px;" alt="ArneNaessens"/><br /><sub><b>ArneNaessens</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=ArneNaessens" title="Code">💻</a> <a href="#ideas-ArneNaessens" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/meienberger/runtipi/commits?author=ArneNaessens" title="Tests">⚠️</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/ArneNaessens"><img src="https://avatars.githubusercontent.com/u/16622722?v=4?s=100" width="100px;" alt="ArneNaessens"/><br /><sub><b>ArneNaessens</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=ArneNaessens" title="Code">💻</a> <a href="#ideas-ArneNaessens" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/runtipi/runtipi/commits?author=ArneNaessens" title="Tests">⚠️</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/DrMxrcy"><img src="https://avatars.githubusercontent.com/u/58747968?v=4?s=100" width="100px;" alt="DrMxrcy"/><br /><sub><b>DrMxrcy</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=DrMxrcy" title="Code">💻</a> <a href="#ideas-DrMxrcy" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/meienberger/runtipi/commits?author=DrMxrcy" title="Tests">⚠️</a> <a href="#content-DrMxrcy" title="Content">🖋</a> <a href="#promotion-DrMxrcy" title="Promotion">📣</a> <a href="#question-DrMxrcy" title="Answering Questions">💬</a> <a href="https://github.com/meienberger/runtipi/pulls?q=is%3Apr+reviewed-by%3ADrMxrcy" title="Reviewed Pull Requests">👀</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/DrMxrcy"><img src="https://avatars.githubusercontent.com/u/58747968?v=4?s=100" width="100px;" alt="DrMxrcy"/><br /><sub><b>DrMxrcy</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=DrMxrcy" title="Code">💻</a> <a href="#ideas-DrMxrcy" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/runtipi/runtipi/commits?author=DrMxrcy" title="Tests">⚠️</a> <a href="#content-DrMxrcy" title="Content">🖋</a> <a href="#promotion-DrMxrcy" title="Promotion">📣</a> <a href="#question-DrMxrcy" title="Answering Questions">💬</a> <a href="https://github.com/runtipi/runtipi/pulls?q=is%3Apr+reviewed-by%3ADrMxrcy" title="Reviewed Pull Requests">👀</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://cobre.dev"><img src="https://avatars.githubusercontent.com/u/36574329?v=4?s=100" width="100px;" alt="Cooper"/><br /><sub><b>Cooper</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=CobreDev" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://cobre.dev"><img src="https://avatars.githubusercontent.com/u/36574329?v=4?s=100" width="100px;" alt="Cooper"/><br /><sub><b>Cooper</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=CobreDev" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/JTruj1ll0923"><img src="https://avatars.githubusercontent.com/u/6656643?v=4?s=100" width="100px;" alt="JTruj1ll0923"/><br /><sub><b>JTruj1ll0923</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=JTruj1ll0923" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/JTruj1ll0923"><img src="https://avatars.githubusercontent.com/u/6656643?v=4?s=100" width="100px;" alt="JTruj1ll0923"/><br /><sub><b>JTruj1ll0923</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=JTruj1ll0923" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Stetsed"><img src="https://avatars.githubusercontent.com/u/33891782?v=4?s=100" width="100px;" alt="Stetsed"/><br /><sub><b>Stetsed</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=Stetsed" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Stetsed"><img src="https://avatars.githubusercontent.com/u/33891782?v=4?s=100" width="100px;" alt="Stetsed"/><br /><sub><b>Stetsed</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=Stetsed" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/blushell"><img src="https://avatars.githubusercontent.com/u/3621606?v=4?s=100" width="100px;" alt="Jones_Town"/><br /><sub><b>Jones_Town</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=blushell" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/blushell"><img src="https://avatars.githubusercontent.com/u/3621606?v=4?s=100" width="100px;" alt="Jones_Town"/><br /><sub><b>Jones_Town</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=blushell" title="Code">💻</a></td>
    </tr>
    <tr>
-      <td align="center" valign="top" width="14.28%"><a href="https://rushichaudhari.github.io/"><img src="https://avatars.githubusercontent.com/u/6279035?v=4?s=100" width="100px;" alt="Rushi Chaudhari"/><br /><sub><b>Rushi Chaudhari</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=rushic24" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://rushichaudhari.github.io/"><img src="https://avatars.githubusercontent.com/u/6279035?v=4?s=100" width="100px;" alt="Rushi Chaudhari"/><br /><sub><b>Rushi Chaudhari</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=rushic24" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/rblaine95"><img src="https://avatars.githubusercontent.com/u/4052340?v=4?s=100" width="100px;" alt="Robert Blaine"/><br /><sub><b>Robert Blaine</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=rblaine95" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/rblaine95"><img src="https://avatars.githubusercontent.com/u/4052340?v=4?s=100" width="100px;" alt="Robert Blaine"/><br /><sub><b>Robert Blaine</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=rblaine95" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://sethforprivacy.com"><img src="https://avatars.githubusercontent.com/u/40500387?v=4?s=100" width="100px;" alt="Seth For Privacy"/><br /><sub><b>Seth For Privacy</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=sethforprivacy" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://sethforprivacy.com"><img src="https://avatars.githubusercontent.com/u/40500387?v=4?s=100" width="100px;" alt="Seth For Privacy"/><br /><sub><b>Seth For Privacy</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=sethforprivacy" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/hqwuzhaoyi"><img src="https://avatars.githubusercontent.com/u/44605072?v=4?s=100" width="100px;" alt="Prajna"/><br /><sub><b>Prajna</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=hqwuzhaoyi" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/hqwuzhaoyi"><img src="https://avatars.githubusercontent.com/u/44605072?v=4?s=100" width="100px;" alt="Prajna"/><br /><sub><b>Prajna</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=hqwuzhaoyi" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/justincmoy"><img src="https://avatars.githubusercontent.com/u/14875982?v=4?s=100" width="100px;" alt="Justin Moy"/><br /><sub><b>Justin Moy</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=justincmoy" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/justincmoy"><img src="https://avatars.githubusercontent.com/u/14875982?v=4?s=100" width="100px;" alt="Justin Moy"/><br /><sub><b>Justin Moy</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=justincmoy" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/dextreem"><img src="https://avatars.githubusercontent.com/u/11060652?v=4?s=100" width="100px;" alt="dextreem"/><br /><sub><b>dextreem</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=dextreem" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/dextreem"><img src="https://avatars.githubusercontent.com/u/11060652?v=4?s=100" width="100px;" alt="dextreem"/><br /><sub><b>dextreem</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=dextreem" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/iBicha"><img src="https://avatars.githubusercontent.com/u/17722782?v=4?s=100" width="100px;" alt="Brahim Hadriche"/><br /><sub><b>Brahim Hadriche</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=iBicha" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/iBicha"><img src="https://avatars.githubusercontent.com/u/17722782?v=4?s=100" width="100px;" alt="Brahim Hadriche"/><br /><sub><b>Brahim Hadriche</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=iBicha" title="Code">💻</a></td>
    </tr>
    <tr>
      <td align="center" valign="top" width="14.28%"><a href="https://andrewbrereton.com"><img src="https://avatars.githubusercontent.com/u/682893?v=4?s=100" width="100px;" alt="Andrew Brereton"/><br /><sub><b>Andrew Brereton</b></sub></a><br /><a href="#content-andrewbrereton" title="Content">🖋</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://fsackur.github.io/"><img src="https://avatars.githubusercontent.com/u/3678789?v=4?s=100" width="100px;" alt="Freddie Sackur"/><br /><sub><b>Freddie Sackur</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=fsackur" title="Code">💻</a> <a href="https://github.com/meienberger/runtipi/commits?author=fsackur" title="Documentation">📖</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://fsackur.github.io/"><img src="https://avatars.githubusercontent.com/u/3678789?v=4?s=100" width="100px;" alt="Freddie Sackur"/><br /><sub><b>Freddie Sackur</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=fsackur" title="Code">💻</a> <a href="https://github.com/runtipi/runtipi/commits?author=fsackur" title="Documentation">📖</a></td>
      <td align="center" valign="top" width="14.28%"><a href="http://innocentius.github.io"><img src="https://avatars.githubusercontent.com/u/5344432?v=4?s=100" width="100px;" alt="Innocentius"/><br /><sub><b>Innocentius</b></sub></a><br /><a href="#translation-innocentius" title="Translation">🌍</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/TetrisIQ"><img src="https://avatars.githubusercontent.com/u/24246993?v=4?s=100" width="100px;" alt="Alex"/><br /><sub><b>Alex</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=TetrisIQ" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/TetrisIQ"><img src="https://avatars.githubusercontent.com/u/24246993?v=4?s=100" width="100px;" alt="Alex"/><br /><sub><b>Alex</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=TetrisIQ" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://ryanc.cc"><img src="https://avatars.githubusercontent.com/u/21301288?v=4?s=100" width="100px;" alt="Ryan Wang"/><br /><sub><b>Ryan Wang</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=ruibaby" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://ryanc.cc"><img src="https://avatars.githubusercontent.com/u/21301288?v=4?s=100" width="100px;" alt="Ryan Wang"/><br /><sub><b>Ryan Wang</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=ruibaby" title="Code">💻</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/simonandr"><img src="https://avatars.githubusercontent.com/u/48092304?v=4?s=100" width="100px;" alt="simonandr"/><br /><sub><b>simonandr</b></sub></a><br /><a href="#content-simonandr" title="Content">🖋</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/demizeu"><img src="https://avatars.githubusercontent.com/u/121183951?v=4?s=100" width="100px;" alt="iepure"/><br /><sub><b>iepure</b></sub></a><br /><a href="#translation-demizeu" title="Translation">🌍</a></td>
    </tr>
    <tr>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/SergeyKodolov"><img src="https://avatars.githubusercontent.com/u/35339452?v=4?s=100" width="100px;" alt="Sergey Kodolov"/><br /><sub><b>Sergey Kodolov</b></sub></a><br /><a href="#translation-SergeyKodolov" title="Translation">🌍</a> <a href="https://github.com/meienberger/runtipi/commits?author=SergeyKodolov" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/SergeyKodolov"><img src="https://avatars.githubusercontent.com/u/35339452?v=4?s=100" width="100px;" alt="Sergey Kodolov"/><br /><sub><b>Sergey Kodolov</b></sub></a><br /><a href="#translation-SergeyKodolov" title="Translation">🌍</a> <a href="https://github.com/runtipi/runtipi/commits?author=SergeyKodolov" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/sclaren"><img src="https://avatars.githubusercontent.com/u/915292?v=4?s=100" width="100px;" alt="sclaren"/><br /><sub><b>sclaren</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=sclaren" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/sclaren"><img src="https://avatars.githubusercontent.com/u/915292?v=4?s=100" width="100px;" alt="sclaren"/><br /><sub><b>sclaren</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=sclaren" title="Code">💻</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/mcmeel"><img src="https://avatars.githubusercontent.com/u/13773536?v=4?s=100" width="100px;" alt="mcmeel"/><br /><sub><b>mcmeel</b></sub></a><br /><a href="#question-mcmeel" title="Answering Questions">💬</a> <a href="#ideas-mcmeel" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/meienberger/runtipi/commits?author=mcmeel" title="Code">💻</a> <a href="https://github.com/meienberger/runtipi/commits?author=mcmeel" title="Documentation">📖</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/mcmeel"><img src="https://avatars.githubusercontent.com/u/13773536?v=4?s=100" width="100px;" alt="mcmeel"/><br /><sub><b>mcmeel</b></sub></a><br /><a href="#question-mcmeel" title="Answering Questions">💬</a> <a href="#ideas-mcmeel" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/runtipi/runtipi/commits?author=mcmeel" title="Code">💻</a> <a href="https://github.com/runtipi/runtipi/commits?author=mcmeel" title="Documentation">📖</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/NoisyFridge"><img src="https://avatars.githubusercontent.com/u/73795785?v=4?s=100" width="100px;" alt="NoisyFridge"/><br /><sub><b>NoisyFridge</b></sub></a><br /><a href="#translation-NoisyFridge" title="Translation">🌍</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Bvoxl"><img src="https://avatars.githubusercontent.com/u/67489519?v=4?s=100" width="100px;" alt="Bvoxl"/><br /><sub><b>Bvoxl</b></sub></a><br /><a href="#translation-Bvoxl" title="Translation">🌍</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/m-lab-0"><img src="https://avatars.githubusercontent.com/u/116570617?v=4?s=100" width="100px;" alt="m-lab-0"/><br /><sub><b>m-lab-0</b></sub></a><br /><a href="#translation-m-lab-0" title="Translation">🌍</a></td>
@@ -110,15 +111,20 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
    <tr>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/Schmanko"><img src="https://avatars.githubusercontent.com/u/94195393?v=4?s=100" width="100px;" alt="Schmanko"/><br /><sub><b>Schmanko</b></sub></a><br /><a href="#translation-Schmanko" title="Translation">🌍</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://micro.nghialele.com"><img src="https://avatars.githubusercontent.com/u/129353223?v=4?s=100" width="100px;" alt="Nghia Lele"/><br /><sub><b>Nghia Lele</b></sub></a><br /><a href="#translation-nghialele" title="Translation">🌍</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/amusingimpala75"><img src="https://avatars.githubusercontent.com/u/69653100?v=4?s=100" width="100px;" alt="amusingimpala75"/><br /><sub><b>amusingimpala75</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=amusingimpala75" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/amusingimpala75"><img src="https://avatars.githubusercontent.com/u/69653100?v=4?s=100" width="100px;" alt="amusingimpala75"/><br /><sub><b>amusingimpala75</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=amusingimpala75" title="Code">💻</a></td>
      <td align="center" valign="top" width="14.28%"><a href="http://m1n.omg.lol"><img src="https://avatars.githubusercontent.com/u/54779580?v=4?s=100" width="100px;" alt="David"/><br /><sub><b>David</b></sub></a><br /><a href="#translation-M1n-4d316e" title="Translation">🌍</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/steveiliop56"><img src="https://avatars.githubusercontent.com/u/106091011?v=4?s=100" width="100px;" alt="Stavros Iliopoulos"/><br /><sub><b>Stavros Iliopoulos</b></sub></a><br /><a href="#translation-steveiliop56" title="Translation">🌍</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/steveiliop56"><img src="https://avatars.githubusercontent.com/u/106091011?v=4?s=100" width="100px;" alt="Stavros Iliopoulos"/><br /><sub><b>Stavros Iliopoulos</b></sub></a><br /><a href="#translation-steveiliop56" title="Translation">🌍</a> <a href="https://github.com/runtipi/runtipi/commits?author=steveiliop56" title="Code">💻</a> <a href="https://github.com/runtipi/runtipi/commits?author=steveiliop56" title="Tests">⚠️</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/loxiry"><img src="https://avatars.githubusercontent.com/u/86959495?v=4?s=100" width="100px;" alt="loxiry"/><br /><sub><b>loxiry</b></sub></a><br /><a href="#translation-loxiry" title="Translation">🌍</a></td>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/JigSawFr"><img src="https://avatars.githubusercontent.com/u/5781907?v=4?s=100" width="100px;" alt="JigSaw"/><br /><sub><b>JigSaw</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=JigSawFr" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/JigSawFr"><img src="https://avatars.githubusercontent.com/u/5781907?v=4?s=100" width="100px;" alt="JigSaw"/><br /><sub><b>JigSaw</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=JigSawFr" title="Code">💻</a></td>
    </tr>
    <tr>
-      <td align="center" valign="top" width="14.28%"><a href="https://github.com/DireMunchkin"><img src="https://avatars.githubusercontent.com/u/1665676?v=4?s=100" width="100px;" alt="DireMunchkin"/><br /><sub><b>DireMunchkin</b></sub></a><br /><a href="https://github.com/meienberger/runtipi/commits?author=DireMunchkin" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/DireMunchkin"><img src="https://avatars.githubusercontent.com/u/1665676?v=4?s=100" width="100px;" alt="DireMunchkin"/><br /><sub><b>DireMunchkin</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=DireMunchkin" title="Code">💻</a></td>
      <td align="center" valign="top" width="14.28%"><a href="https://github.com/FabioCingottini"><img src="https://avatars.githubusercontent.com/u/32102735?v=4?s=100" width="100px;" alt="Fabio Cingottini"/><br /><sub><b>Fabio Cingottini</b></sub></a><br /><a href="#translation-FabioCingottini" title="Translation">🌍</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/itsrllyhim"><img src="https://avatars.githubusercontent.com/u/143047010?v=4?s=100" width="100px;" alt="him"/><br /><sub><b>him</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=itsrllyhim" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="http://cchalop1.com"><img src="https://avatars.githubusercontent.com/u/28163855?v=4?s=100" width="100px;" alt="CHALOPIN Clément"/><br /><sub><b>CHALOPIN Clément</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=cchalop1" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/geetansh"><img src="https://avatars.githubusercontent.com/u/9976198?v=4?s=100" width="100px;" alt="Geetansh Jindal"/><br /><sub><b>Geetansh Jindal</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=geetansh" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/0livier"><img src="https://avatars.githubusercontent.com/u/10607?v=4?s=100" width="100px;" alt="Olivier Garcia"/><br /><sub><b>Olivier Garcia</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=0livier" title="Code">💻</a></td>
+      <td align="center" valign="top" width="14.28%"><a href="https://github.com/qcoudeyr"><img src="https://avatars.githubusercontent.com/u/124463277?v=4?s=100" width="100px;" alt="qcoudeyr"/><br /><sub><b>qcoudeyr</b></sub></a><br /><a href="https://github.com/runtipi/runtipi/commits?author=qcoudeyr" title="Code">💻</a></td>
    </tr>
  </tbody>
</table>
@@ -5,3 +5,8 @@ ignore:
  - 'screenshots'
  - '**/*.json'
  - '**/tests/**'
+coverage:
+  status:
+    project:
+      default:
+        informational: true
@@ -40,13 +40,13 @@ services:

  tipi-redis:
    container_name: tipi-redis
-    image: redis:alpine
+    image: redis:7.2.0
    restart: unless-stopped
    command: redis-server --requirepass ${REDIS_PASSWORD}
    ports:
      - 6379:6379
    volumes:
-      - ./data/redis:/data
+      - redisdata:/data
    healthcheck:
      test: ['CMD', 'redis-cli', 'ping']
      interval: 5s
@@ -55,6 +55,43 @@ services:
    networks:
      - tipi_main_network

+  tipi-worker:
+    build:
+      context: .
+      dockerfile: ./packages/worker/Dockerfile.dev
+    container_name: tipi-worker
+    healthcheck:
+      test: ['CMD', 'curl', '-f', 'http://localhost:3000/healthcheck']
+      interval: 5s
+      timeout: 10s
+      retries: 120
+      start_period: 5s
+    depends_on:
+      tipi-db:
+        condition: service_healthy
+      tipi-redis:
+        condition: service_healthy
+    env_file:
+      - .env
+    environment:
+      NODE_ENV: development
+    volumes:
+      # Dev mode
+      - ${PWD}/packages/worker/src:/app/packages/worker/src
+      # Production mode
+      - /proc:/host/proc:ro
+      - /var/run/docker.sock:/var/run/docker.sock
+      - ${PWD}/.env:/app/.env
+      - ${PWD}/state:/app/state
+      - ${PWD}/repos:/app/repos
+      - ${PWD}/apps:/app/apps
+      - ${STORAGE_PATH:-$PWD}/app-data:/storage/app-data
+      - ${PWD}/logs:/app/logs
+      - ${PWD}/traefik:/app/traefik
+      - ${PWD}/user-config:/app/user-config
+    networks:
+      - tipi_main_network
+
  tipi-dashboard:
    build:
      context: .
@@ -65,6 +102,8 @@ services:
        condition: service_healthy
      tipi-redis:
        condition: service_healthy
+      tipi-worker:
+        condition: service_healthy
    env_file:
      - .env
    environment:
@@ -84,7 +123,7 @@ services:
      - ${PWD}/apps:/runtipi/apps
      - ${PWD}/logs:/app/logs
      - ${PWD}/traefik:/runtipi/traefik
-      - ${STORAGE_PATH}:/app/storage
+      - ${STORAGE_PATH:-$PWD}:/app/storage
    labels:
      traefik.enable: true
      traefik.http.services.dashboard.loadbalancer.server.port: 3000
@@ -103,9 +142,8 @@ services:
networks:
  tipi_main_network:
    driver: bridge
-    ipam:
-      driver: default
-      config:
-        - subnet: 10.21.21.0/24
+    name: runtipi_tipi_main_network

volumes:
  pgdata:
+  redisdata:
146  docker-compose.prod.yml  Normal file

@@ -0,0 +1,146 @@
version: '3.7'

services:
  tipi-reverse-proxy:
    container_name: tipi-reverse-proxy
    image: traefik:v2.8
    restart: on-failure
    ports:
      - 80:80
      - 443:443
      - 8080:8080
    command: --providers.docker
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - ${PWD}/traefik:/root/.config
      - ${PWD}/traefik/shared:/shared
    networks:
      - tipi_main_network

  tipi-db:
    container_name: tipi-db
    image: postgres:14
    restart: unless-stopped
    stop_grace_period: 1m
    volumes:
      - pgdata:/var/lib/postgresql/data
    ports:
      - 5432:5432
    environment:
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
      POSTGRES_USER: tipi
      POSTGRES_DB: tipi
    healthcheck:
      test: ['CMD-SHELL', 'pg_isready -d tipi -U tipi']
      interval: 5s
      timeout: 10s
      retries: 120
    networks:
      - tipi_main_network

  tipi-redis:
    container_name: tipi-redis
    image: redis:7.2.0
    restart: unless-stopped
    command: redis-server --requirepass ${REDIS_PASSWORD}
    ports:
      - 6379:6379
    volumes:
      - redisdata:/data
    healthcheck:
      test: ['CMD', 'redis-cli', 'ping']
      interval: 5s
      timeout: 10s
      retries: 120
    networks:
      - tipi_main_network

  tipi-worker:
    build:
      context: .
      dockerfile: ./packages/worker/Dockerfile
    container_name: tipi-worker
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:3000/healthcheck']
      interval: 5s
      timeout: 10s
      retries: 120
      start_period: 5s
    depends_on:
      tipi-db:
        condition: service_healthy
      tipi-redis:
        condition: service_healthy
    env_file:
      - .env
    environment:
      NODE_ENV: production
    volumes:
      - /proc:/host/proc
      - /var/run/docker.sock:/var/run/docker.sock
      - ${PWD}/.env:/app/.env
      - ${PWD}/state:/app/state
      - ${PWD}/repos:/app/repos
      - ${PWD}/apps:/app/apps
      - ${STORAGE_PATH:-$PWD}/app-data:/storage/app-data
      - ${PWD}/logs:/app/logs
      - ${PWD}/traefik:/app/traefik
      - ${PWD}/user-config:/app/user-config
    networks:
      - tipi_main_network

  tipi-dashboard:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: tipi-dashboard
    depends_on:
      tipi-db:
        condition: service_healthy
      tipi-redis:
        condition: service_healthy
      tipi-worker:
        condition: service_healthy
    env_file:
      - .env
    environment:
      NODE_ENV: development
    networks:
      - tipi_main_network
    ports:
      - 3000:3000
    volumes:
      - ${PWD}/.env:/runtipi/.env
      - ${PWD}/state:/runtipi/state
      - ${PWD}/repos:/runtipi/repos:ro
      - ${PWD}/apps:/runtipi/apps
      - ${PWD}/logs:/app/logs
      - ${PWD}/traefik:/runtipi/traefik
      - ${STORAGE_PATH}:/app/storage
    labels:
      traefik.enable: true
      traefik.http.services.dashboard.loadbalancer.server.port: 3000
      traefik.http.middlewares.redirect-to-https.redirectscheme.scheme: https
      # Local ip
      traefik.http.routers.dashboard.rule: PathPrefix("/")
      traefik.http.routers.dashboard.service: dashboard
      traefik.http.routers.dashboard.entrypoints: web
      # Local domain
      traefik.http.routers.dashboard-local-insecure.rule: Host(`${LOCAL_DOMAIN}`)
      traefik.http.routers.dashboard-local-insecure.entrypoints: web
      traefik.http.routers.dashboard-local-insecure.service: dashboard
      traefik.http.routers.dashboard-local-insecure.middlewares: redirect-to-https
      # secure
      traefik.http.routers.dashboard-local.rule: Host(`${LOCAL_DOMAIN}`)
      traefik.http.routers.dashboard-local.entrypoints: websecure
      traefik.http.routers.dashboard-local.tls: true
      traefik.http.routers.dashboard-local.service: dashboard

networks:
  tipi_main_network:
    driver: bridge
    name: runtipi_tipi_main_network

volumes:
  pgdata:
  redisdata:
@@ -1,11 +1,11 @@
import { test, expect } from '@playwright/test';
-import { eq } from 'drizzle-orm';
-import { userTable } from '@/server/db/schema';
import { loginUser } from './fixtures/fixtures';
-import { clearDatabase, db } from './helpers/db';
+import { clearDatabase } from './helpers/db';
import { testUser } from './helpers/constants';
+import { setSettings } from './helpers/settings';

test.beforeEach(async ({ page }) => {
+  await setSettings({});
  await clearDatabase();
  await loginUser(page);

@@ -34,14 +34,48 @@ test('user can change their password', async ({ page }) => {
  await expect(page.getByRole('heading', { name: 'Dashboard' })).toBeVisible();
});

-test('user can change their language and it is persisted in database', async ({ page }) => {
+test('user can change their email', async ({ page }) => {
-  await page.getByRole('tab', { name: 'Settings' }).click();
+  // Change email
+  const newEmail = 'tester2@test.com';
+
-  await page.getByRole('combobox', { name: 'Language Help translate Tipi' }).click();
+  await page.getByRole('tab', { name: 'Security' }).click();
-  await page.getByRole('option', { name: 'Français' }).click();
+  await page.getByRole('button', { name: 'Change username' }).click();
+  await page.getByPlaceholder('New username').click();
+  await page.getByPlaceholder('New username').fill(newEmail);
+
-  await expect(page.getByText('Paramètres utilisateur')).toBeVisible();
+  // Wrong password
+  await page.getByPlaceholder('Password', { exact: true }).click();
+  await page.getByPlaceholder('Password', { exact: true }).fill('incorrect');
+
-  const dbUser = await db.query.userTable.findFirst({ where: eq(userTable.username, testUser.email) });
-  expect(dbUser?.locale).toEqual('fr-FR');
+  await page.getByRole('button', { name: 'Change username' }).click();
+
+  await expect(page.getByText('Invalid password')).toBeVisible();
+
+  // Wrong email
+  await page.getByPlaceholder('Password', { exact: true }).click();
+  await page.getByPlaceholder('Password', { exact: true }).fill(testUser.password);
+  await page.getByPlaceholder('New username').click();
+  await page.getByPlaceholder('New username').fill('incorrect');
+
+  await page.getByRole('button', { name: 'Change username' }).click();
+
+  await expect(page.getByText('Must be a valid email address')).toBeVisible();
+
+  // Correct email and password
+  await page.getByPlaceholder('New username').click();
+  await page.getByPlaceholder('New username').fill(newEmail);
+
+  await page.getByRole('button', { name: 'Change username' }).click();
+
+  await expect(page.getByText('Username changed successfully')).toBeVisible();
+
+  // Login with new email
+  await page.getByPlaceholder('you@example.com').click();
+  await page.getByPlaceholder('you@example.com').fill(newEmail);
+  await page.getByPlaceholder('Your password').click();
+  await page.getByPlaceholder('Your password').fill(testUser.password);
+
+  await page.getByRole('button', { name: 'Login' }).click();
+
+  await expect(page.getByRole('heading', { name: 'Dashboard' })).toBeVisible();
});
58  e2e/0005-guest-dashboard.spec.ts  Normal file

@@ -0,0 +1,58 @@
import { test, expect } from '@playwright/test';
import { appTable } from '@/server/db/schema';
import { setSettings } from './helpers/settings';
import { loginUser } from './fixtures/fixtures';
import { clearDatabase, db } from './helpers/db';

test.beforeEach(async () => {
  await clearDatabase();
  await setSettings({});
});

test('user can activate the guest dashboard and see it when logged out', async ({ page }) => {
  await loginUser(page);
  await page.goto('/settings');

  await page.getByRole('tab', { name: 'Settings' }).click();
  await page.getByLabel('guestDashboard').setChecked(true);
  await page.getByRole('button', { name: 'Save' }).click();
  await page.getByTestId('logout-button').click();

  await expect(page.getByText('No apps to display')).toBeVisible();
});

test('logged out users can see the apps on the guest dashboard', async ({ browser }) => {
  await setSettings({ guestDashboard: true });
  await db.insert(appTable).values({ config: {}, isVisibleOnGuestDashboard: true, id: 'hello-world', exposed: true, domain: 'duckduckgo.com', status: 'running' });
  await db.insert(appTable).values({ config: {}, isVisibleOnGuestDashboard: false, id: 'actual-budget', exposed: false, status: 'running' });

  const context = await browser.newContext();
  const page = await context.newPage();
  await page.goto('/');
  await expect(page.getByText(/Hello World web server/)).toBeVisible();
  const locator = page.locator('text=Actual Budget');
  expect(locator).not.toBeVisible();

  const [newPage] = await Promise.all([context.waitForEvent('page'), await page.getByRole('link', { name: /Hello World/ }).click()]);

  await newPage.waitForLoadState();
  expect(newPage.url()).toBe('https://duckduckgo.com/');
  await newPage.close();

  await context.close();
});

test('user can deactivate the guest dashboard and not see it when logged out', async ({ page }) => {
  await loginUser(page);
  await page.goto('/settings');

  await page.getByRole('tab', { name: 'Settings' }).click();
  await page.getByLabel('guestDashboard').setChecked(false);
  await page.getByRole('button', { name: 'Save' }).click();
  await page.getByTestId('logout-button').click();

  await page.goto('/');

  // We should be redirected to the login page
  await expect(page.getByRole('heading', { name: 'Login' })).toBeVisible();
});
@@ -1,10 +1,12 @@
import { clearDatabase } from './db';
+import { setSettings } from './settings';

/**
 *
 */
async function globalSetup() {
  await clearDatabase();
+  await setSettings({});
}

export default globalSetup;
8  e2e/helpers/settings.ts  Normal file

@@ -0,0 +1,8 @@
import { promises } from 'fs';
import path from 'path';
import { z } from 'zod';
import { settingsSchema } from '@runtipi/shared';

export const setSettings = async (settings: z.infer<typeof settingsSchema>) => {
  await promises.writeFile(path.join(__dirname, '../../state/settings.json'), JSON.stringify(settings));
};
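The new `setSettings` helper above simply serializes a settings object to `state/settings.json`, so the app under test picks the values up without going through the UI. A small usage sketch, modeled on the specs in this changeset (the `guestDashboard` flag is one of the fields accepted by `settingsSchema`; the test title here is illustrative only):

// Reset to default settings before each test, then opt into a specific
// configuration inside one scenario.
import { test } from '@playwright/test';
import { setSettings } from './helpers/settings';

test.beforeEach(async () => {
  await setSettings({}); // empty object = default settings
});

test('guest dashboard is served to logged-out visitors', async ({ page }) => {
  await setSettings({ guestDashboard: true }); // same flag used by e2e/0005 above
  await page.goto('/');
});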
@@ -4,6 +4,9 @@ const nextConfig = {
  output: 'standalone',
  reactStrictMode: true,
  transpilePackages: ['@runtipi/shared'],
+  experimental: {
+    serverComponentsExternalPackages: ['bullmq'],
+  },
  serverRuntimeConfig: {
    INTERNAL_IP: process.env.INTERNAL_IP,
    TIPI_VERSION: process.env.TIPI_VERSION,
@@ -19,6 +22,14 @@ const nextConfig = {
    NODE_ENV: process.env.NODE_ENV,
    REDIS_HOST: process.env.REDIS_HOST,
  },
+  async rewrites() {
+    return [
+      {
+        source: '/apps/:id',
+        destination: '/app-store/:id',
+      },
+    ];
+  },
};

export default nextConfig;
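Two things change in the Next.js config above: bullmq is kept out of the server-components bundle via `serverComponentsExternalPackages`, and a rewrite serves requests to `/apps/:id` from the `/app-store/:id` route so existing links keep resolving. A minimal typed sketch of the same shape, written with the `NextConfig` type purely for illustration (the project file itself is a plain .mjs config):

import type { NextConfig } from 'next';

const config: NextConfig = {
  experimental: {
    // Load bullmq at runtime instead of bundling it into server components.
    serverComponentsExternalPackages: ['bullmq'],
  },
  async rewrites() {
    // A request to /apps/my-app is answered by the /app-store/my-app route.
    return [{ source: '/apps/:id', destination: '/app-store/:id' }];
  },
};

export default config;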
187  package.json

@@ -1,6 +1,6 @@
{
  "name": "runtipi",
-  "version": "2.0.3",
+  "version": "2.2.1",
  "description": "A homeserver for everyone",
  "scripts": {
    "knip": "knip",
@@ -11,9 +11,9 @@
    "test:client": "jest --colors --selectProjects client --",
    "test:server": "jest --colors --selectProjects server --",
    "test:vite": "dotenv -e .env.test -- vitest run --coverage",
-    "dev": "npm run db:migrate && next dev",
+    "dev": "next dev",
    "dev:watcher": "pnpm -r --filter cli dev",
-    "db:migrate": "NODE_ENV=development dotenv -e .env -- tsx ./src/server/run-migrations-dev.ts",
+    "db:migrate": "NODE_ENV=development dotenv -e .env.local -- tsx ./src/server/run-migrations-dev.ts",
    "lint": "next lint",
    "lint:fix": "next lint --fix",
    "build": "next build",
@@ -21,7 +21,7 @@
    "start:dev-container": "./.devcontainer/filewatcher.sh && npm run start:dev",
    "start:rc": "docker compose -f docker-compose.rc.yml --env-file .env up --build",
    "start:dev": "npm run prepare && docker compose -f docker-compose.dev.yml up --build",
-    "start:e2e": "./scripts/start-e2e.sh latest",
+    "start:prod": "npm run prepare && docker compose --env-file ./.env -f docker-compose.prod.yml up --build",
    "start:pg": "docker run --name test-db -p 5433:5432 -d --rm -e POSTGRES_PASSWORD=postgres postgres:14",
    "version": "echo $npm_package_version",
    "release:rc": "./scripts/deploy/release-rc.sh",
@@ -32,119 +32,109 @@
    "tsc": "tsc"
  },
  "dependencies": {
-    "@hookform/resolvers": "^3.1.1",
+    "@hookform/resolvers": "^3.3.2",
    "@otplib/core": "^12.0.1",
    "@otplib/plugin-crypto": "^12.0.1",
    "@otplib/plugin-thirty-two": "^12.0.1",
-    "@radix-ui/react-dialog": "^1.0.4",
+    "@radix-ui/react-dialog": "^1.0.5",
-    "@radix-ui/react-dropdown-menu": "^2.0.5",
+    "@radix-ui/react-dropdown-menu": "^2.0.6",
-    "@radix-ui/react-select": "^1.2.2",
+    "@radix-ui/react-scroll-area": "^1.0.5",
+    "@radix-ui/react-select": "^2.0.0",
    "@radix-ui/react-switch": "^1.0.3",
    "@radix-ui/react-tabs": "^1.0.4",
    "@runtipi/postgres-migrations": "^5.3.0",
    "@runtipi/shared": "workspace:^",
-    "@tabler/core": "1.0.0-beta19",
+    "@tabler/core": "1.0.0-beta20",
-    "@tabler/icons-react": "^2.23.0",
+    "@tabler/icons-react": "^2.42.0",
-    "@tanstack/react-query": "^4.29.7",
-    "@tanstack/react-query-devtools": "^4.29.7",
-    "@trpc/client": "^10.27.1",
-    "@trpc/next": "^10.27.1",
-    "@trpc/react-query": "^10.27.1",
-    "@trpc/server": "^10.27.1",
-    "argon2": "^0.30.3",
-    "bullmq": "^4.5.0",
-    "clsx": "^1.1.1",
+    "argon2": "^0.31.2",
+    "bullmq": "^4.13.0",
+    "clsx": "^2.0.0",
    "connect-redis": "^7.1.0",
-    "cookies-next": "^2.1.2",
-    "drizzle-orm": "^0.27.0",
+    "drizzle-orm": "^0.28.6",
    "fs-extra": "^11.1.1",
-    "isomorphic-fetch": "^3.0.0",
+    "geist": "^1.2.0",
+    "let-it-go": "^1.0.0",
    "lodash.merge": "^4.6.2",
-    "next": "13.4.7",
+    "next": "14.0.1",
-    "next-intl": "^2.15.1",
-    "pg": "^8.11.1",
+    "next-client-cookies": "^1.0.6",
+    "next-intl": "^2.22.1",
+    "next-safe-action": "^5.0.2",
+    "pg": "^8.11.3",
    "qrcode.react": "^3.1.0",
    "react": "18.2.0",
    "react-dom": "18.2.0",
-    "react-hook-form": "^7.45.1",
+    "react-hook-form": "^7.48.2",
    "react-hot-toast": "^2.4.1",
-    "react-markdown": "^8.0.7",
+    "react-markdown": "^9.0.0",
-    "react-select": "^5.7.3",
+    "react-select": "^5.8.0",
-    "react-tooltip": "^5.16.1",
+    "react-tooltip": "^5.25.0",
    "redaxios": "^0.5.1",
-    "redis": "^4.6.7",
+    "redis": "^4.6.10",
    "rehype-raw": "^7.0.0",
-    "remark-breaks": "^3.0.3",
+    "remark-breaks": "^4.0.0",
-    "remark-gfm": "^3.0.1",
+    "remark-gfm": "^4.0.0",
-    "sass": "^1.63.6",
+    "sass": "^1.69.5",
-    "semver": "^7.5.3",
+    "semver": "^7.5.4",
-    "sharp": "0.32.1",
+    "sharp": "0.32.6",
-    "superjson": "^1.12.3",
+    "swr": "^2.2.4",
-    "tslib": "^2.5.3",
+    "tslib": "^2.6.2",
-    "uuid": "^9.0.0",
+    "uuid": "^9.0.1",
-    "validator": "^13.7.0",
+    "validator": "^13.11.0",
-    "winston": "^3.9.0",
+    "winston": "^3.11.0",
-    "zod": "^3.21.4",
+    "zod": "^3.22.4",
-    "zustand": "^4.3.8"
+    "zustand": "^4.4.6"
  },
  "devDependencies": {
-    "@babel/core": "^7.22.5",
+    "@babel/core": "^7.23.2",
-    "@faker-js/faker": "^8.0.2",
+    "@faker-js/faker": "^8.2.0",
-    "@playwright/test": "^1.35.1",
+    "@playwright/test": "^1.39.0",
-    "@testing-library/dom": "^9.3.1",
+    "@testing-library/dom": "^9.3.3",
-    "@testing-library/jest-dom": "^5.16.5",
+    "@testing-library/jest-dom": "^6.1.4",
    "@testing-library/react": "^14.0.0",
-    "@testing-library/user-event": "^14.4.3",
+    "@testing-library/user-event": "^14.5.1",
    "@total-typescript/shoehorn": "^0.1.1",
-    "@total-typescript/ts-reset": "^0.4.2",
+    "@total-typescript/ts-reset": "^0.5.1",
-    "@types/express": "^4.17.13",
-    "@types/express-session": "^1.17.7",
-    "@types/fs-extra": "^11.0.1",
-    "@types/isomorphic-fetch": "^0.0.36",
-    "@types/jest": "^29.5.2",
-    "@types/lodash.merge": "^4.6.7",
-    "@types/node": "20.3.2",
-    "@types/pg": "^8.10.2",
-    "@types/react": "18.2.14",
-    "@types/react-dom": "18.2.6",
-    "@types/semver": "^7.5.0",
-    "@types/supertest": "^2.0.12",
-    "@types/testing-library__jest-dom": "^5.14.6",
-    "@types/uuid": "^9.0.2",
-    "@types/validator": "^13.7.17",
-    "@typescript-eslint/eslint-plugin": "^5.60.1",
-    "@typescript-eslint/parser": "^5.60.1",
-    "@vitejs/plugin-react": "^4.0.1",
-    "@vitest/coverage-v8": "^0.32.2",
-    "dotenv-cli": "^7.2.1",
-    "eslint": "8.43.0",
+    "@types/fs-extra": "^11.0.4",
+    "@types/jest": "^29.5.11",
+    "@types/lodash.merge": "^4.6.8",
+    "@types/node": "20.8.10",
+    "@types/pg": "^8.10.7",
+    "@types/react": "18.2.39",
+    "@types/react-dom": "18.2.14",
+    "@types/semver": "^7.5.4",
+    "@types/uuid": "^9.0.6",
+    "@types/validator": "^13.11.5",
+    "@typescript-eslint/eslint-plugin": "^6.13.1",
+    "@typescript-eslint/parser": "^6.10.0",
+    "@vitejs/plugin-react": "^4.1.1",
+    "@vitest/coverage-v8": "^0.34.6",
+    "dotenv-cli": "^7.3.0",
+    "eslint": "8.52.0",
    "eslint-config-airbnb": "^19.0.4",
-    "eslint-config-airbnb-typescript": "^17.0.0",
+    "eslint-config-airbnb-typescript": "^17.1.0",
-    "eslint-config-next": "13.4.7",
+    "eslint-config-next": "14.0.3",
-    "eslint-config-prettier": "^8.8.0",
+    "eslint-config-prettier": "^9.0.0",
-    "eslint-import-resolver-typescript": "^3.5.5",
+    "eslint-import-resolver-typescript": "^3.6.1",
-    "eslint-plugin-import": "^2.27.5",
+    "eslint-plugin-import": "^2.29.0",
-    "eslint-plugin-jest": "^27.2.2",
+    "eslint-plugin-jest": "^27.6.0",
-    "eslint-plugin-jest-dom": "^5.0.1",
+    "eslint-plugin-jest-dom": "^5.1.0",
-    "eslint-plugin-jsdoc": "^46.3.0",
-    "eslint-plugin-jsx-a11y": "^6.6.1",
-    "eslint-plugin-react": "^7.31.10",
+    "eslint-plugin-jsx-a11y": "^6.8.0",
+    "eslint-plugin-react": "^7.33.2",
    "eslint-plugin-react-hooks": "^4.6.0",
-    "eslint-plugin-testing-library": "^5.11.0",
+    "eslint-plugin-testing-library": "^6.1.0",
-    "jest": "^29.5.0",
+    "jest": "^29.7.0",
-    "jest-environment-jsdom": "^29.5.0",
+    "jest-environment-jsdom": "^29.7.0",
-    "knip": "^2.19.4",
+    "knip": "^2.41.3",
-    "memfs": "^4.2.0",
+    "memfs": "^4.6.0",
-    "msw": "^1.2.2",
+    "msw": "^1.3.2",
-    "next-router-mock": "^0.9.7",
+    "next-router-mock": "^0.9.10",
-    "prettier": "^2.8.8",
+    "prettier": "^3.0.3",
-    "supertest": "^6.3.3",
-    "ts-jest": "^29.1.0",
+    "ts-jest": "^29.1.1",
    "ts-node": "^10.9.1",
-    "tsx": "^3.12.7",
+    "tsx": "^3.14.0",
-    "typescript": "5.1.5",
+    "typescript": "5.2.2",
-    "vite-tsconfig-paths": "^4.2.0",
+    "vite-tsconfig-paths": "^4.2.1",
-    "vitest": "^0.32.2",
+    "vitest": "^0.34.6",
    "wait-for-expect": "^3.0.2"
  },
  "msw": {
@@ -152,12 +142,15 @@
  },
  "repository": {
    "type": "git",
-    "url": "git+https://github.com/meienberger/runtipi.git"
+    "url": "git+https://github.com/runtipi/runtipi.git"
  },
  "author": "",
  "license": "GNU General Public License v3.0",
  "bugs": {
-    "url": "https://github.com/meienberger/runtipi/issues"
+    "url": "https://github.com/runtipi/runtipi/issues"
  },
-  "homepage": "https://github.com/meienberger/runtipi#readme"
+  "homepage": "https://github.com/runtipi/runtipi#readme",
+  "pnpm": {
+    "patchedDependencies": {}
+  }
}
@@ -6,3 +6,8 @@ ROOT_FOLDER_HOST=/runtipi
 STORAGE_PATH=/runtipi
 TIPI_VERSION=1
 REDIS_PASSWORD=redis
+POSTGRES_HOST=localhost
+POSTGRES_DBNAME=postgres
+POSTGRES_USERNAME=postgres
+POSTGRES_PASSWORD=postgres
+POSTGRES_PORT=5433
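The new POSTGRES_* entries above give the CLI's test environment a direct database connection. Purely as an illustration (the variable names come from the hunk above, everything else is assumed and not part of this diff), a test helper could load them with dotenv and open a pg client:

```ts
import dotenv from 'dotenv';
import pg from 'pg';

// Hypothetical helper, not part of the diff: read the POSTGRES_* values
// added above from .env.test and connect to the database they describe.
dotenv.config({ path: '.env.test' });

export const getTestDbClient = async () => {
  const client = new pg.Client({
    host: process.env.POSTGRES_HOST,
    database: process.env.POSTGRES_DBNAME,
    user: process.env.POSTGRES_USERNAME,
    password: process.env.POSTGRES_PASSWORD,
    port: Number(process.env.POSTGRES_PORT ?? 5432),
  });
  await client.connect();
  return client;
};
```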
@@ -4,10 +4,12 @@ services:
   tipi-reverse-proxy:
     container_name: tipi-reverse-proxy
     image: traefik:v2.8
-    restart: on-failure
+    restart: unless-stopped
+    depends_on:
+      - tipi-dashboard
     ports:
-      - ${NGINX_PORT-80}:80
-      - ${NGINX_PORT_SSL-443}:443
+      - ${NGINX_PORT:-80}:80
+      - ${NGINX_PORT_SSL:-443}:443
     command: --providers.docker
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock:ro
@@ -19,10 +21,10 @@ services:
   tipi-db:
     container_name: tipi-db
     image: postgres:14
-    restart: on-failure
+    restart: unless-stopped
    stop_grace_period: 1m
     ports:
-      - 5432:5432
+      - ${POSTGRES_PORT:-5432}:5432
     volumes:
       - ./data/postgres:/var/lib/postgresql/data
     environment:
@@ -40,7 +42,7 @@ services:
   tipi-redis:
     container_name: tipi-redis
     image: redis:7.2.0
-    restart: on-failure
+    restart: unless-stopped
     command: redis-server --requirepass ${REDIS_PASSWORD}
     ports:
       - 6379:6379
@@ -54,12 +56,16 @@ services:
     networks:
       - tipi_main_network

-  tipi-dashboard:
-    image: meienberger/runtipi:${TIPI_VERSION}
-    restart: on-failure
-    container_name: tipi-dashboard
-    networks:
-      - tipi_main_network
+  tipi-worker:
+    container_name: tipi-worker
+    image: ghcr.io/runtipi/worker:${TIPI_VERSION}
+    restart: unless-stopped
+    healthcheck:
+      test: ['CMD', 'curl', '-f', 'http://localhost:3000/healthcheck']
+      interval: 5s
+      timeout: 10s
+      retries: 120
+      start_period: 5s
     depends_on:
       tipi-db:
         condition: service_healthy
@@ -70,13 +76,46 @@ services:
     environment:
       NODE_ENV: production
     volumes:
-      - ./.env:/runtipi/.env
+      # Core
+      - /proc:/host/proc
+      - /var/run/docker.sock:/var/run/docker.sock
+      # App
+      - ./.env:/app/.env
+      - ./state:/app/state
+      - ./repos:/app/repos
+      - ./apps:/app/apps
+      - ./logs:/app/logs
+      - ./traefik:/app/traefik
+      - ./user-config:/app/user-config
+      - ./media:/app/media
+      - ${STORAGE_PATH:-.}:/storage
+    networks:
+      - tipi_main_network
+
+  tipi-dashboard:
+    image: ghcr.io/runtipi/runtipi:${TIPI_VERSION}
+    restart: unless-stopped
+    container_name: tipi-dashboard
+    networks:
+      - tipi_main_network
+    depends_on:
+      tipi-db:
+        condition: service_healthy
+      tipi-redis:
+        condition: service_healthy
+      tipi-worker:
+        condition: service_healthy
+    volumes:
+      - ./.env:/runtipi/.env:ro
       - ./state:/runtipi/state
       - ./repos:/runtipi/repos:ro
       - ./apps:/runtipi/apps
       - ./logs:/app/logs
-      - ./traefik:/runtipi/traefik
-      - ${STORAGE_PATH}:/app/storage
+      - ${STORAGE_PATH:-.}:/app/storage
+    env_file:
+      - .env
+    environment:
+      NODE_ENV: production
     labels:
       # Main
       traefik.enable: true
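The compose changes split the stack into a tipi-worker and a tipi-dashboard service, and the worker is gated by a curl healthcheck on port 3000 before the dashboard starts. The worker's implementation is not part of this diff; a minimal sketch of an endpoint that would satisfy that healthcheck (every detail here is assumed) looks like this:

```ts
import http from 'http';

// Illustrative only: respond 200 on /healthcheck so that
// `curl -f http://localhost:3000/healthcheck` succeeds.
http
  .createServer((req, res) => {
    if (req.url === '/healthcheck') {
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      res.end('ok');
      return;
    }
    res.writeHead(404);
    res.end();
  })
  .listen(3000);
```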
@@ -8,7 +8,7 @@ async function bundle() {
     entryPoints: ['./src/index.ts'],
     outfile: './dist/index.js',
     platform: 'node',
-    target: 'node18',
+    target: 'node20',
     bundle: true,
     color: true,
     sourcemap: commandArgs.includes('--sourcemap'),
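This hunk only bumps the esbuild target from node18 to node20. For context, a self-contained sketch of such a build script under that assumption (the option values mirror the lines visible above; the rest of build.js is not shown in this diff):

```ts
import { build } from 'esbuild';

const commandArgs = process.argv.slice(2);

// Bundle the CLI entry point for Node 20, mirroring the options above.
const bundle = async () => {
  await build({
    entryPoints: ['./src/index.ts'],
    outfile: './dist/index.js',
    platform: 'node',
    target: 'node20',
    bundle: true,
    color: true,
    sourcemap: commandArgs.includes('--sourcemap'),
  });
};

bundle();
```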
@@ -1,26 +1,27 @@
 {
   "name": "@runtipi/cli",
-  "version": "2.0.3",
+  "version": "2.1.0",
   "description": "",
   "main": "index.js",
   "bin": "dist/index.js",
   "scripts": {
-    "test": "dotenv -e .env.test vitest -- --coverage --watch=false",
+    "test": "dotenv -e .env.test vitest -- --coverage --watch=false --passWithNoTests",
     "test:watch": "dotenv -e .env.test vitest",
     "package": "npm run build && pkg package.json && chmod +x dist/bin/cli-x64 && chmod +x dist/bin/cli-arm64",
-    "package:m1": "npm run build && pkg package.json -t node18-darwin-arm64",
+    "package:m1": "npm run build && pkg package.json -t node20-darwin-arm64",
     "set-version": "node -e \"require('fs').writeFileSync('assets/VERSION', process.argv[1])\"",
     "build": "node build.js",
-    "build:meta": "esbuild ./src/index.ts --bundle --platform=node --target=node18 --outfile=dist/index.js --metafile=meta.json --analyze",
+    "build:meta": "esbuild ./src/index.ts --bundle --platform=node --target=node20 --outfile=dist/index.js --metafile=meta.json --analyze",
     "dev": "dotenv -e ../../.env nodemon",
     "lint": "eslint . --ext .ts",
-    "tsc": "tsc --noEmit"
+    "tsc": "tsc --noEmit",
+    "knip": "knip"
   },
   "pkg": {
     "assets": "assets/**/*",
     "targets": [
-      "node18-linux-x64",
-      "node18-linux-arm64"
+      "node20-linux-x64",
+      "node20-linux-arm64"
     ],
     "outputPath": "dist/bin"
   },
@@ -28,36 +29,32 @@
   "author": "",
   "license": "ISC",
   "devDependencies": {
-    "@faker-js/faker": "^8.0.2",
+    "@faker-js/faker": "^8.2.0",
-    "@types/cli-progress": "^3.11.0",
+    "@types/cli-progress": "^3.11.5",
-    "@types/node": "20.3.2",
+    "@types/node": "20.8.10",
-    "@types/web-push": "^3.3.2",
-    "dotenv-cli": "^7.2.1",
-    "esbuild": "^0.16.17",
-    "eslint-config-prettier": "^8.8.0",
-    "memfs": "^4.2.0",
-    "nodemon": "^2.0.22",
+    "dotenv-cli": "^7.3.0",
+    "esbuild": "^0.19.4",
+    "eslint-config-prettier": "^9.0.0",
+    "knip": "^2.41.3",
+    "memfs": "^4.6.0",
+    "nodemon": "^3.0.1",
     "pkg": "^5.8.1",
-    "vite": "^4.4.7",
+    "vite": "^4.5.0",
-    "vite-tsconfig-paths": "^4.2.0",
+    "vite-tsconfig-paths": "^4.2.1",
-    "vitest": "^0.32.2"
+    "vitest": "^0.34.6"
   },
   "dependencies": {
-    "@runtipi/postgres-migrations": "^5.3.0",
     "@runtipi/shared": "workspace:^",
-    "axios": "^1.4.0",
+    "axios": "^1.6.0",
     "boxen": "^7.1.1",
-    "bullmq": "^4.5.0",
+    "bullmq": "^4.13.0",
     "chalk": "^5.3.0",
     "cli-progress": "^3.12.0",
-    "cli-spinners": "^2.9.0",
+    "cli-spinners": "^2.9.1",
-    "commander": "^11.0.0",
+    "commander": "^11.1.0",
     "dotenv": "^16.3.1",
     "log-update": "^5.0.1",
-    "pg": "^8.11.1",
-    "semver": "^7.5.3",
-    "systeminformation": "^5.18.7",
-    "web-push": "^3.6.3",
-    "zod": "^3.21.4"
+    "semver": "^7.5.4",
+    "zod": "^3.22.4"
   }
 }
@@ -1,297 +1,73 @@
-/* eslint-disable no-await-in-loop */
-/* eslint-disable no-restricted-syntax */
-import fs from 'fs';
-import path from 'path';
-import { exec } from 'child_process';
-import { promisify } from 'util';
-import pg from 'pg';
+import { Queue, QueueEvents } from 'bullmq';
+import { SystemEvent, eventSchema } from '@runtipi/shared';
 import { getEnv } from '@/utils/environment/environment';
-import { pathExists } from '@/utils/fs-helpers';
-import { compose } from '@/utils/docker-helpers';
-import { copyDataDir, generateEnvFile } from './app.helpers';
-import { fileLogger } from '@/utils/logger/file-logger';
+import { logger } from '@/utils/logger/logger';
 import { TerminalSpinner } from '@/utils/logger/terminal-spinner';

-const execAsync = promisify(exec);
-
-const getDbClient = async () => {
-  const { postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getEnv();
-
-  const client = new pg.Client({
-    host: '127.0.0.1',
-    database: postgresDatabase,
-    user: postgresUsername,
-    password: postgresPassword,
-    port: Number(postgresPort),
-  });
-
-  await client.connect();
-
-  return client;
-};
-
 export class AppExecutors {
   private readonly logger;

   constructor() {
-    this.logger = fileLogger;
+    this.logger = logger;
   }

-  private handleAppError = (err: unknown) => {
-    if (err instanceof Error) {
-      this.logger.error(`An error occurred: ${err.message}`);
-      return { success: false, message: err.message };
-    }
-
-    return { success: false, message: `An error occurred: ${err}` };
-  };
+  private getQueue = () => {
+    const { redisPassword } = getEnv();
+    const queue = new Queue('events', { connection: { host: '127.0.0.1', port: 6379, password: redisPassword } });
+    const queueEvents = new QueueEvents('events', { connection: { host: '127.0.0.1', port: 6379, password: redisPassword } });
+    return { queue, queueEvents };
+  };

-  private getAppPaths = (appId: string) => {
-    const { rootFolderHost, storagePath, appsRepoId } = getEnv();
-
-    const appDataDirPath = path.join(storagePath, 'app-data', appId);
-    const appDirPath = path.join(rootFolderHost, 'apps', appId);
-    const configJsonPath = path.join(appDirPath, 'config.json');
-    const repoPath = path.join(rootFolderHost, 'repos', appsRepoId, 'apps', appId);
-
-    return { appDataDirPath, appDirPath, configJsonPath, repoPath };
-  };
-
-  /**
-   * Given an app id, ensures that the app folder exists in the apps folder
-   * If not, copies the app folder from the repo
-   * @param {string} appId - App id
-   */
-  private ensureAppDir = async (appId: string) => {
-    const { rootFolderHost } = getEnv();
-
-    const { appDirPath, repoPath } = this.getAppPaths(appId);
-    const dockerFilePath = path.join(rootFolderHost, 'apps', appId, 'docker-compose.yml');
-
-    if (!(await pathExists(dockerFilePath))) {
-      // delete eventual app folder if exists
-      this.logger.info(`Deleting app ${appId} folder if exists`);
-      await fs.promises.rm(appDirPath, { recursive: true, force: true });
-
-      // Copy app folder from repo
-      this.logger.info(`Copying app ${appId} from repo ${getEnv().appsRepoId}`);
-      await fs.promises.cp(repoPath, appDirPath, { recursive: true });
-    }
-  };
-
-  /**
-   * Install an app from the repo
-   * @param {string} appId - The id of the app to install
-   * @param {Record<string, unknown>} config - The config of the app
-   */
-  public installApp = async (appId: string, config: Record<string, unknown>) => {
-    try {
-      if (process.getuid && process.getgid) {
-        this.logger.info(`Installing app ${appId} as User ID: ${process.getuid()}, Group ID: ${process.getgid()}`);
-      } else {
-        this.logger.info(`Installing app ${appId}. No User ID or Group ID found.`);
-      }
-
-      const { rootFolderHost, appsRepoId } = getEnv();
-
-      const { appDirPath, repoPath, appDataDirPath } = this.getAppPaths(appId);
-
-      // Check if app exists in repo
-      const apps = await fs.promises.readdir(path.join(rootFolderHost, 'repos', appsRepoId, 'apps'));
-
-      if (!apps.includes(appId)) {
-        this.logger.error(`App ${appId} not found in repo ${appsRepoId}`);
-        return { success: false, message: `App ${appId} not found in repo ${appsRepoId}` };
-      }
-
-      // Delete app folder if exists
-      this.logger.info(`Deleting folder ${appDirPath} if exists`);
-      await fs.promises.rm(appDirPath, { recursive: true, force: true });
-
-      // Create app folder
-      this.logger.info(`Creating folder ${appDirPath}`);
-      await fs.promises.mkdir(appDirPath, { recursive: true });
-
-      // Copy app folder from repo
-      this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
-      await fs.promises.cp(repoPath, appDirPath, { recursive: true });
-
-      // Create folder app-data folder
-      this.logger.info(`Creating folder ${appDataDirPath}`);
-      await fs.promises.mkdir(appDataDirPath, { recursive: true });
-
-      // Create app.env file
-      this.logger.info(`Creating app.env file for app ${appId}`);
-      await generateEnvFile(appId, config);
-
-      // Copy data dir
-      this.logger.info(`Copying data dir for app ${appId}`);
-      if (!(await pathExists(`${appDataDirPath}/data`))) {
-        await copyDataDir(appId);
-      }
-
-      await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
-        this.logger.error(`Error setting permissions for app ${appId}`);
-      });
-
-      // run docker-compose up
-      this.logger.info(`Running docker-compose up for app ${appId}`);
-      await compose(appId, 'up -d');
-
-      this.logger.info(`Docker-compose up for app ${appId} finished`);
-
-      return { success: true, message: `App ${appId} installed successfully` };
-    } catch (err) {
-      return this.handleAppError(err);
-    }
+  private generateJobId = (event: Record<string, unknown>) => {
+    const { appId, action } = event;
+    return `${appId}-${action}`;
   };

   /**
    * Stops an app
    * @param {string} appId - The id of the app to stop
-   * @param {Record<string, unknown>} config - The config of the app
    */
-  public stopApp = async (appId: string, config: Record<string, unknown>, skipEnvGeneration = false) => {
-    try {
-      this.logger.info(`Stopping app ${appId}`);
-
-      this.logger.info(`Regenerating app.env file for app ${appId}`);
-      await this.ensureAppDir(appId);
-
-      if (!skipEnvGeneration) {
-        await generateEnvFile(appId, config);
-      }
-      await compose(appId, 'rm --force --stop');
-
-      this.logger.info(`App ${appId} stopped`);
-      return { success: true, message: `App ${appId} stopped successfully` };
-    } catch (err) {
-      return this.handleAppError(err);
+  public stopApp = async (appId: string) => {
+    const spinner = new TerminalSpinner(`Stopping app ${appId}`);
+    spinner.start();
+
+    const jobid = this.generateJobId({ appId, action: 'stop' });
+
+    const { queue, queueEvents } = this.getQueue();
+    const event = { type: 'app', command: 'stop', appid: appId, form: {}, skipEnv: true } satisfies SystemEvent;
+    const job = await queue.add(jobid, eventSchema.parse(event));
+    const result = await job.waitUntilFinished(queueEvents, 1000 * 60 * 5);
+
+    await queueEvents.close();
+    await queue.close();
+
+    if (!result?.success) {
+      this.logger.error(result?.message);
+      spinner.fail(`Failed to stop app ${appId} see logs for more details (logs/error.log)`);
+    } else {
+      spinner.done(`App ${appId} stopped`);
     }
   };

-  public startApp = async (appId: string, config: Record<string, unknown>) => {
-    try {
-      const { appDataDirPath } = this.getAppPaths(appId);
-
-      this.logger.info(`Starting app ${appId}`);
-
-      this.logger.info(`Regenerating app.env file for app ${appId}`);
-      await this.ensureAppDir(appId);
-      await generateEnvFile(appId, config);
-      await compose(appId, 'up --detach --force-recreate --remove-orphans');
-
-      this.logger.info(`App ${appId} started`);
-
-      this.logger.info(`Setting permissions for app ${appId}`);
-      await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
-        this.logger.error(`Error setting permissions for app ${appId}`);
-      });
-
-      return { success: true, message: `App ${appId} started successfully` };
-    } catch (err) {
-      return this.handleAppError(err);
-    }
-  };
-
-  public uninstallApp = async (appId: string, config: Record<string, unknown>) => {
-    try {
-      const { appDirPath, appDataDirPath } = this.getAppPaths(appId);
-      this.logger.info(`Uninstalling app ${appId}`);
-
-      this.logger.info(`Regenerating app.env file for app ${appId}`);
-      await this.ensureAppDir(appId);
-      await generateEnvFile(appId, config);
-      await compose(appId, 'down --remove-orphans --volumes --rmi all');
-
-      this.logger.info(`Deleting folder ${appDirPath}`);
-      await fs.promises.rm(appDirPath, { recursive: true, force: true }).catch((err) => {
-        this.logger.error(`Error deleting folder ${appDirPath}: ${err.message}`);
-      });
-
-      this.logger.info(`Deleting folder ${appDataDirPath}`);
-      await fs.promises.rm(appDataDirPath, { recursive: true, force: true }).catch((err) => {
-        this.logger.error(`Error deleting folder ${appDataDirPath}: ${err.message}`);
-      });
-
-      this.logger.info(`App ${appId} uninstalled`);
-      return { success: true, message: `App ${appId} uninstalled successfully` };
-    } catch (err) {
-      return this.handleAppError(err);
-    }
-  };
-
-  public updateApp = async (appId: string, config: Record<string, unknown>) => {
-    try {
-      const { appDirPath, repoPath } = this.getAppPaths(appId);
-      this.logger.info(`Updating app ${appId}`);
-      await this.ensureAppDir(appId);
-      await generateEnvFile(appId, config);
-
-      await compose(appId, 'up --detach --force-recreate --remove-orphans');
-      await compose(appId, 'down --rmi all --remove-orphans');
-
-      this.logger.info(`Deleting folder ${appDirPath}`);
-      await fs.promises.rm(appDirPath, { recursive: true, force: true });
-
-      this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
-      await fs.promises.cp(repoPath, appDirPath, { recursive: true });
-
-      await compose(appId, 'pull');
-
-      return { success: true, message: `App ${appId} updated successfully` };
-    } catch (err) {
-      return this.handleAppError(err);
-    }
-  };
-
-  public regenerateAppEnv = async (appId: string, config: Record<string, unknown>) => {
-    try {
-      this.logger.info(`Regenerating app.env file for app ${appId}`);
-      await this.ensureAppDir(appId);
-      await generateEnvFile(appId, config);
-      return { success: true, message: `App ${appId} env file regenerated successfully` };
-    } catch (err) {
-      return this.handleAppError(err);
-    }
-  };
-
-  /**
-   * Start all apps with status running
-   */
-  public startAllApps = async () => {
-    const spinner = new TerminalSpinner('Starting apps...');
-    const client = await getDbClient();
-
-    try {
-      // Get all apps with status running
-      const { rows } = await client.query(`SELECT * FROM app WHERE status = 'running'`);
-
-      // Update all apps with status different than running or stopped to stopped
-      await client.query(`UPDATE app SET status = 'stopped' WHERE status != 'stopped' AND status != 'running' AND status != 'missing'`);
-
-      // Start all apps
-      for (const row of rows) {
-        spinner.setMessage(`Starting app ${row.id}`);
-        spinner.start();
-        const { id, config } = row;
-
-        const { success } = await this.startApp(id, config);
-
-        if (!success) {
-          this.logger.error(`Error starting app ${id}`);
-          await client.query(`UPDATE app SET status = 'stopped' WHERE id = '${id}'`);
-          spinner.fail(`Error starting app ${id}`);
-        } else {
-          await client.query(`UPDATE app SET status = 'running' WHERE id = '${id}'`);
-          spinner.done(`App ${id} started`);
-        }
-      }
-    } catch (err) {
-      this.logger.error(`Error starting apps: ${err}`);
-      spinner.fail(`Error starting apps see logs for details (logs/error.log)`);
-    } finally {
-      await client.end();
+  public startApp = async (appId: string) => {
+    const spinner = new TerminalSpinner(`Starting app ${appId}`);
+    spinner.start();
+    const jobid = this.generateJobId({ appId, action: 'start' });
+    const { queue, queueEvents } = this.getQueue();
+    const event = { type: 'app', command: 'start', appid: appId, form: {}, skipEnv: true } satisfies SystemEvent;
+    const job = await queue.add(jobid, eventSchema.parse(event));
+    const result = await job.waitUntilFinished(queueEvents, 1000 * 60 * 5);
+
+    await queueEvents.close();
+    await queue.close();
+
+    if (!result.success) {
+      spinner.fail(`Failed to start app ${appId} see logs for more details (logs/error.log)`);
+    } else {
+      spinner.done(`App ${appId} started`);
     }
   };
 }
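The rewritten AppExecutors no longer shells out to docker compose itself: startApp and stopApp push a job onto the BullMQ 'events' queue and wait for the worker to finish it. Condensed to its core, the round-trip looks like the sketch below (the connection values are the ones visible in the diff; the payload shape is whatever eventSchema from @runtipi/shared accepts, which this diff does not show):

```ts
import { Queue, QueueEvents } from 'bullmq';

const connection = { host: '127.0.0.1', port: 6379, password: 'redis' };

// Add a job to the shared 'events' queue and block until the worker
// reports completion, with the same 5-minute timeout used above.
export const runJob = async (name: string, payload: Record<string, unknown>) => {
  const queue = new Queue('events', { connection });
  const queueEvents = new QueueEvents('events', { connection });

  const job = await queue.add(name, payload);
  const result = await job.waitUntilFinished(queueEvents, 1000 * 60 * 5);

  await queueEvents.close();
  await queue.close();

  return result;
};
```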
@@ -1,3 +1,2 @@
 export { AppExecutors } from './app/app.executors';
-export { RepoExecutors } from './repo/repo.executors';
 export { SystemExecutors } from './system/system.executors';
@@ -1,12 +0,0 @@
-import crypto from 'crypto';
-
-/**
- * Given a repo url, return a hash of it to be used as a folder name
- *
- * @param {string} repoUrl
- */
-export const getRepoHash = (repoUrl: string) => {
-  const hash = crypto.createHash('sha256');
-  hash.update(repoUrl);
-  return hash.digest('hex');
-};
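The deleted repo.helpers.ts contained a single helper that turned a repository URL into a stable folder name by hashing it. Its behaviour is easy to reproduce where needed; the example URL below is the default appstore URL that appears later in this diff, everything else is illustrative:

```ts
import crypto from 'crypto';

// Same idea as the removed getRepoHash: sha256 of the repo URL, hex encoded,
// usable as a folder name under ./repos.
const getRepoHash = (repoUrl: string) => crypto.createHash('sha256').update(repoUrl).digest('hex');

console.log(getRepoHash('https://github.com/meienberger/runtipi-appstore'));
```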
@@ -1,30 +1,21 @@
 /* eslint-disable no-restricted-syntax */
 /* eslint-disable no-await-in-loop */
-import { Queue } from 'bullmq';
 import fs from 'fs';
 import cliProgress from 'cli-progress';
 import semver from 'semver';
 import axios from 'axios';
 import boxen from 'boxen';
 import path from 'path';
-import { exec, spawn } from 'child_process';
-import si from 'systeminformation';
+import { spawn } from 'child_process';
 import { Stream } from 'stream';
-import { promisify } from 'util';
 import dotenv from 'dotenv';
-import { SystemEvent } from '@runtipi/shared';
-import chalk from 'chalk';
-import { killOtherWorkers } from 'src/services/watcher/watcher';
+import { pathExists } from '@runtipi/shared';
 import { AppExecutors } from '../app/app.executors';
-import { copySystemFiles, generateSystemEnvFile, generateTlsCertificates } from './system.helpers';
+import { copySystemFiles, generateSystemEnvFile } from './system.helpers';
 import { TerminalSpinner } from '@/utils/logger/terminal-spinner';
-import { pathExists } from '@/utils/fs-helpers';
 import { getEnv } from '@/utils/environment/environment';
-import { fileLogger } from '@/utils/logger/file-logger';
-import { runPostgresMigrations } from '@/utils/migrations/run-migration';
-import { getUserIds } from '@/utils/environment/user';
-
-const execAsync = promisify(exec);
+import { logger } from '@/utils/logger/logger';
+import { execAsync } from '@/utils/exec-async/execAsync';

 export class SystemExecutors {
   private readonly rootFolder: string;
@@ -35,7 +26,7 @@ export class SystemExecutors {

   constructor() {
     this.rootFolder = process.cwd();
-    this.logger = fileLogger;
+    this.logger = logger;

     this.envFile = path.join(this.rootFolder, '.env');
   }
@@ -50,67 +41,9 @@
     return { success: false, message: `An error occurred: ${err}` };
   };

-  private getSystemLoad = async () => {
-    const { currentLoad } = await si.currentLoad();
-    const mem = await si.mem();
-    const [disk0] = await si.fsSize();
-
-    return {
-      cpu: { load: currentLoad },
-      memory: { total: mem.total, used: mem.used, available: mem.available },
-      disk: { total: disk0?.size, used: disk0?.used, available: disk0?.available },
-    };
-  };
-
-  private ensureFilePermissions = async (rootFolderHost: string) => {
-    const logger = new TerminalSpinner('');
-
-    const filesAndFolders = [
-      path.join(rootFolderHost, 'apps'),
-      path.join(rootFolderHost, 'logs'),
-      path.join(rootFolderHost, 'media'),
-      path.join(rootFolderHost, 'repos'),
-      path.join(rootFolderHost, 'state'),
-      path.join(rootFolderHost, 'traefik'),
-      path.join(rootFolderHost, '.env'),
-      path.join(rootFolderHost, 'VERSION'),
-      path.join(rootFolderHost, 'docker-compose.yml'),
-    ];
-
-    const files600 = [path.join(rootFolderHost, 'traefik', 'shared', 'acme.json')];
-
-    this.logger.info('Setting file permissions a+rwx on required files');
-    // Give permission to read and write to all files and folders for the current user
-    for (const fileOrFolder of filesAndFolders) {
-      if (await pathExists(fileOrFolder)) {
-        this.logger.info(`Setting permissions on ${fileOrFolder}`);
-        await execAsync(`chmod -R a+rwx ${fileOrFolder}`).catch(() => {
-          logger.fail(`Failed to set permissions on ${fileOrFolder}`);
-        });
-        this.logger.info(`Successfully set permissions on ${fileOrFolder}`);
-      }
-    }
-
-    this.logger.info('Setting file permissions 600 on required files');
-
-    for (const fileOrFolder of files600) {
-      if (await pathExists(fileOrFolder)) {
-        this.logger.info(`Setting permissions on ${fileOrFolder}`);
-        await execAsync(`chmod 600 ${fileOrFolder}`).catch(() => {
-          logger.fail(`Failed to set permissions on ${fileOrFolder}`);
-        });
-        this.logger.info(`Successfully set permissions on ${fileOrFolder}`);
-      }
-    }
-  };
-
   public cleanLogs = async () => {
     try {
-      const { rootFolderHost } = getEnv();
-
-      await fs.promises.rm(path.join(rootFolderHost, 'logs'), { recursive: true, force: true });
-      await fs.promises.mkdir(path.join(rootFolderHost, 'logs'));
+      await this.logger.flush();

       this.logger.info('Logs cleaned successfully');

       return { success: true, message: '' };
@@ -119,20 +52,6 @@
     }
   };

-  public systemInfo = async () => {
-    try {
-      const { rootFolderHost } = getEnv();
-      const systemLoad = await this.getSystemLoad();
-
-      await fs.promises.writeFile(path.join(rootFolderHost, 'state', 'system-info.json'), JSON.stringify(systemLoad, null, 2));
-      await fs.promises.chmod(path.join(rootFolderHost, 'state', 'system-info.json'), 0o777);
-
-      return { success: true, message: '' };
-    } catch (e) {
-      return this.handleSystemError(e);
-    }
-  };
-
   /**
    * This method will stop Tipi
    * It will stop all the apps and then stop the main containers.
@@ -149,7 +68,7 @@
       for (const app of apps) {
         spinner.setMessage(`Stopping ${app}...`);
         spinner.start();
-        await appExecutor.stopApp(app, {}, true);
+        await appExecutor.stopApp(app);
         spinner.done(`${app} stopped`);
       }
     }
@@ -173,52 +92,30 @@
    * This method will start Tipi.
    * It will copy the system files, generate the system env file, pull the images and start the containers.
    */
-  public start = async (sudo = true, killWatchers = true) => {
+  public start = async () => {
     const spinner = new TerminalSpinner('Starting Tipi...');
     try {
-      const { isSudo } = getUserIds();
-
-      if (!sudo) {
-        console.log(
-          boxen(
-            "You are running in sudoless mode. While Tipi should work as expected, you'll probably run into permission issues and will have to manually fix them. We recommend running Tipi with sudo for beginners.",
-            {
-              title: '⛔️Sudoless mode',
-              titleAlignment: 'center',
-              textAlignment: 'center',
-              padding: 1,
-              borderStyle: 'double',
-              borderColor: 'red',
-              margin: { top: 1, bottom: 1 },
-              width: 80,
-            },
-          ),
-        );
-      }
-
-      this.logger.info('Killing other workers...');
-
-      if (killWatchers) {
-        await killOtherWorkers();
-      }
-
-      if (!isSudo && sudo) {
-        console.log(chalk.red('Tipi needs to run as root to start. Use sudo ./runtipi-cli start'));
-        throw new Error('Tipi needs to run as root to start. Use sudo ./runtipi-cli start');
-      }
-
+      await this.logger.flush();
+
+      // Check if user is in docker group
+      spinner.setMessage('Checking docker permissions...');
       spinner.start();
+      const { stdout: dockerVersion } = await execAsync('docker --version');
+
+      if (!dockerVersion) {
+        spinner.fail('Your user is not allowed to run docker commands. Please add your user to the docker group or run Tipi as root.');
+        return { success: false, message: 'You need to be in the docker group to run Tipi' };
+      }
+      spinner.done('User allowed to run docker commands');
+
       spinner.setMessage('Copying system files...');
+      spinner.start();
+
       this.logger.info('Copying system files...');
       await copySystemFiles();

       spinner.done('System files copied');

-      if (sudo) {
-        await this.ensureFilePermissions(this.rootFolder);
-      }
-
       spinner.setMessage('Generating system env file...');
       spinner.start();
       this.logger.info('Generating system env file...');
@@ -245,69 +142,22 @@
       await execAsync(`docker compose --env-file ${this.envFile} up --detach --remove-orphans --build`);
       spinner.done('Containers started');

-      // start watcher cli in the background
-      spinner.setMessage('Starting watcher...');
-      spinner.start();
-
-      this.logger.info('Generating TLS certificates...');
-      await generateTlsCertificates({ domain: envMap.get('LOCAL_DOMAIN') });
-
-      if (killWatchers) {
-        this.logger.info('Starting watcher...');
-        const subprocess = spawn('./runtipi-cli', [process.argv[1] as string, 'watch'], { cwd: this.rootFolder, detached: true, stdio: ['ignore', 'ignore', 'ignore'] });
-        subprocess.unref();
-      }
-
-      spinner.done('Watcher started');
-
-      this.logger.info('Starting queue...');
-      const queue = new Queue('events', { connection: { host: '127.0.0.1', port: 6379, password: envMap.get('REDIS_PASSWORD') } });
-      this.logger.info('Obliterating queue...');
-      await queue.obliterate({ force: true });
-
-      // Initial jobs
-      this.logger.info('Adding initial jobs to queue...');
-      await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent);
-      await queue.add(`${Math.random().toString()}_repo_clone`, { type: 'repo', command: 'clone', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
-
-      // Scheduled jobs
-      this.logger.info('Adding scheduled jobs to queue...');
-      await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent, { repeat: { pattern: '*/30 * * * *' } });
-      await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent, { repeat: { pattern: '* * * * *' } });
-
-      this.logger.info('Closing queue...');
-      await queue.close();
-
-      spinner.setMessage('Running database migrations...');
-      spinner.start();
-
-      this.logger.info('Running database migrations...');
-      await runPostgresMigrations({
-        postgresHost: '127.0.0.1',
-        postgresDatabase: envMap.get('POSTGRES_DBNAME') as string,
-        postgresUsername: envMap.get('POSTGRES_USERNAME') as string,
-        postgresPassword: envMap.get('POSTGRES_PASSWORD') as string,
-        postgresPort: envMap.get('POSTGRES_PORT') as string,
-      });
-
-      spinner.done('Database migrations complete');
-
-      // Start all apps
-      const appExecutor = new AppExecutors();
-      this.logger.info('Starting all apps...');
-      await appExecutor.startAllApps();
-
       console.log(
-        boxen(`Visit: http://${envMap.get('INTERNAL_IP')}:${envMap.get('NGINX_PORT')} to access the dashboard\n\nFind documentation and guides at: https://runtipi.io`, {
-          title: 'Tipi successfully started 🎉',
-          titleAlignment: 'center',
-          textAlignment: 'center',
-          padding: 1,
-          borderStyle: 'double',
-          borderColor: 'green',
-          width: 80,
-          margin: { top: 1 },
-        }),
+        boxen(
+          `Visit: http://${envMap.get('INTERNAL_IP')}:${envMap.get(
+            'NGINX_PORT',
+          )} to access the dashboard\n\nFind documentation and guides at: https://runtipi.io\n\nTipi is entierly written in TypeScript and we are looking for contributors!`,
+          {
+            title: 'Tipi successfully started 🎉',
+            titleAlignment: 'center',
+            textAlignment: 'center',
+            padding: 1,
+            borderStyle: 'double',
+            borderColor: 'green',
+            width: 80,
+            margin: { top: 1 },
+          },
+        ),
       );

       return { success: true, message: 'Tipi started' };
@@ -323,7 +173,7 @@
   public restart = async () => {
     try {
       await this.stop();
-      await this.start(true, false);
+      await this.start();
       return { success: true, message: '' };
     } catch (e) {
       return this.handleSystemError(e);
@@ -357,7 +207,7 @@
     if (!targetVersion || targetVersion === 'latest') {
       spinner.setMessage('Fetching latest version...');
-      const { data } = await axios.get<{ tag_name: string }>('https://api.github.com/repos/meienberger/runtipi/releases/latest');
+      const { data } = await axios.get<{ tag_name: string }>('https://api.github.com/repos/runtipi/runtipi/releases/latest');
       this.logger.info(`Getting latest version from GitHub: ${data.tag_name}`);
       targetVersion = data.tag_name;
     }
@@ -377,7 +227,7 @@
     const fileName = `runtipi-cli-${targetVersion}`;
     const savePath = path.join(rootFolderHost, fileName);
-    const fileUrl = `https://github.com/meienberger/runtipi/releases/download/${targetVersion}/${assetName}`;
+    const fileUrl = `https://github.com/runtipi/runtipi/releases/download/${targetVersion}/${assetName}`;
     this.logger.info(`Downloading Tipi ${targetVersion} from ${fileUrl}`);

     spinner.done(`Target version: ${targetVersion}`);
@@ -407,16 +257,13 @@
         bar.stop();
         this.logger.error(`Failed to download Tipi: ${err}`);
         spinner.fail(`\nFailed to download Tipi ${targetVersion}`);
-        writer.close();
         reject(err);
       });

       writer.on('finish', () => {
         this.logger.info('Download complete');
         bar.stop();
-        writer.close(() => {
-          resolve('');
-        });
+        resolve('');
       });
     });
   }).catch((e) => {
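The update path still streams the release asset to disk with axios and settles on the write stream's events; this last hunk only drops the extra writer.close() calls. Reduced to the pattern itself (the URL and path are placeholders, not values from the diff), it looks roughly like:

```ts
import fs from 'fs';
import axios from 'axios';
import { Stream } from 'stream';

// Stream a file to disk and resolve when the write stream finishes,
// rejecting if the write fails.
export const downloadFile = async (fileUrl: string, savePath: string) => {
  const { data } = await axios.get<Stream>(fileUrl, { responseType: 'stream' });
  const writer = fs.createWriteStream(savePath);

  data.pipe(writer);

  await new Promise<void>((resolve, reject) => {
    writer.on('finish', () => resolve());
    writer.on('error', (err) => reject(err));
  });
};
```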
@@ -2,13 +2,8 @@ import crypto from 'crypto';
 import fs from 'fs';
 import path from 'path';
 import os from 'os';
-import { envMapToString, envStringToMap, settingsSchema } from '@runtipi/shared';
-import { exec } from 'child_process';
-import { promisify } from 'util';
-import chalk from 'chalk';
-import { pathExists } from '@/utils/fs-helpers';
-import { getRepoHash } from '../repo/repo.helpers';
-import { fileLogger } from '@/utils/logger/file-logger';
+import { envMapToString, envStringToMap, pathExists, settingsSchema } from '@runtipi/shared';
+import { logger } from '@/utils/logger/logger';

 type EnvKeys =
   | 'APPS_REPO_ID'
@@ -33,15 +28,12 @@ type EnvKeys =
   | 'REDIS_PASSWORD'
   | 'LOCAL_DOMAIN'
   | 'DEMO_MODE'
+  | 'GUEST_DASHBOARD'
   | 'TIPI_GID'
   | 'TIPI_UID'
   // eslint-disable-next-line @typescript-eslint/ban-types
   | (string & {});

-const execAsync = promisify(exec);
-
-const DEFAULT_REPO_URL = 'https://github.com/meienberger/runtipi-appstore';
-
 /**
  * Reads and returns the generated seed
  */
@@ -148,168 +140,43 @@ export const generateSystemEnvFile = async () => {
   const { data } = settings;

-  const jwtSecret = envMap.get('JWT_SECRET') || (await deriveEntropy('jwt_secret'));
-  const repoId = getRepoHash(data.appsRepoUrl || DEFAULT_REPO_URL);
   const postgresPassword = envMap.get('POSTGRES_PASSWORD') || (await deriveEntropy('postgres_password'));
   const redisPassword = envMap.get('REDIS_PASSWORD') || (await deriveEntropy('redis_password'));

   const version = await fs.promises.readFile(path.join(rootFolder, 'VERSION'), 'utf-8');

-  envMap.set('APPS_REPO_ID', repoId);
-  envMap.set('APPS_REPO_URL', data.appsRepoUrl || DEFAULT_REPO_URL);
-  envMap.set('TZ', Intl.DateTimeFormat().resolvedOptions().timeZone);
   envMap.set('INTERNAL_IP', data.listenIp || getInternalIp());
-  envMap.set('DNS_IP', data.dnsIp || '9.9.9.9');
   envMap.set('ARCHITECTURE', getArchitecture());
   envMap.set('TIPI_VERSION', version);
-  envMap.set('JWT_SECRET', jwtSecret);
   envMap.set('ROOT_FOLDER_HOST', rootFolder);
   envMap.set('NGINX_PORT', String(data.port || 80));
   envMap.set('NGINX_PORT_SSL', String(data.sslPort || 443));
-  envMap.set('DOMAIN', data.domain || 'example.com');
   envMap.set('STORAGE_PATH', data.storagePath || rootFolder);
-  envMap.set('POSTGRES_HOST', 'tipi-db');
-  envMap.set('POSTGRES_DBNAME', 'tipi');
-  envMap.set('POSTGRES_USERNAME', 'tipi');
   envMap.set('POSTGRES_PASSWORD', postgresPassword);
-  envMap.set('POSTGRES_PORT', String(5432));
+  envMap.set('POSTGRES_PORT', String(data.postgresPort || 5432));
   envMap.set('REDIS_HOST', 'tipi-redis');
   envMap.set('REDIS_PASSWORD', redisPassword);
-  envMap.set('DEMO_MODE', String(data.demoMode || 'false'));
-  envMap.set('LOCAL_DOMAIN', data.localDomain || 'tipi.lan');
   envMap.set('NODE_ENV', 'production');
-
-  const currentUserGroup = process.getgid ? String(process.getgid()) : '1000';
-  const currentUserId = process.getuid ? String(process.getuid()) : '1000';
-
-  envMap.set('TIPI_GID', currentUserGroup);
-  envMap.set('TIPI_UID', currentUserId);
+  envMap.set('DOMAIN', data.domain || 'example.com');
+  envMap.set('LOCAL_DOMAIN', data.localDomain || 'tipi.lan');

   await fs.promises.writeFile(envFilePath, envMapToString(envMap));

   return envMap;
 };

-/**
- * Sets the value of an environment variable in the .env file
- *
- * @param {string} key - The key of the environment variable
- * @param {string} value - The value of the environment variable
- */
-export const setEnvVariable = async (key: EnvKeys, value: string) => {
-  const rootFolder = process.cwd();
-
-  const envFilePath = path.join(rootFolder, '.env');
-
-  if (!(await pathExists(envFilePath))) {
-    await fs.promises.writeFile(envFilePath, '');
-  }
-
-  const envFile = await fs.promises.readFile(envFilePath, 'utf-8');
-  const envMap: Map<EnvKeys, string> = envStringToMap(envFile);
-
-  envMap.set(key, value);
-
-  await fs.promises.writeFile(envFilePath, envMapToString(envMap));
-};
-
 /**
  * Copies the system files from the assets folder to the current working directory
  */
 export const copySystemFiles = async () => {
   // Remove old unused files
-  if (await pathExists(path.join(process.cwd(), 'scripts'))) {
-    fileLogger.info('Removing old scripts folder');
-    await fs.promises.rmdir(path.join(process.cwd(), 'scripts'), { recursive: true });
-  }
-
   const assetsFolder = path.join('/snapshot', 'runtipi', 'packages', 'cli', 'assets');

   // Copy docker-compose.yml file
-  fileLogger.info('Copying file docker-compose.yml');
+  logger.info('Copying file docker-compose.yml');
   await fs.promises.copyFile(path.join(assetsFolder, 'docker-compose.yml'), path.join(process.cwd(), 'docker-compose.yml'));

   // Copy VERSION file
-  fileLogger.info('Copying file VERSION');
+  logger.info('Copying file VERSION');
   await fs.promises.copyFile(path.join(assetsFolder, 'VERSION'), path.join(process.cwd(), 'VERSION'));

-  // Copy traefik folder from assets
-  fileLogger.info('Creating traefik folders');
-  await fs.promises.mkdir(path.join(process.cwd(), 'traefik', 'dynamic'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'traefik', 'shared'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'traefik', 'tls'), { recursive: true });
-
-  fileLogger.info('Copying traefik files');
-  await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'traefik.yml'), path.join(process.cwd(), 'traefik', 'traefik.yml'));
-  await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'dynamic', 'dynamic.yml'), path.join(process.cwd(), 'traefik', 'dynamic', 'dynamic.yml'));
-
-  // Create base folders
-  fileLogger.info('Creating base folders');
-  await fs.promises.mkdir(path.join(process.cwd(), 'apps'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'app-data'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'state'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'repos'), { recursive: true });
-
-  // Create media folders
-  fileLogger.info('Creating media folders');
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'torrents', 'watch'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'torrents', 'complete'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'torrents', 'incomplete'), { recursive: true });
-
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'usenet', 'watch'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'usenet', 'complete'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'usenet', 'incomplete'), { recursive: true });
-
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'downloads', 'watch'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'downloads', 'complete'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'downloads', 'incomplete'), { recursive: true });
-
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'books'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'comics'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'movies'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'music'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'tv'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'podcasts'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'images'), { recursive: true });
-  await fs.promises.mkdir(path.join(process.cwd(), 'media', 'data', 'roms'), { recursive: true });
-};
-
-/**
- * Given a domain, generates the TLS certificates for it to be used with Traefik
- *
- * @param {string} data.domain The domain to generate the certificates for
- */
-export const generateTlsCertificates = async (data: { domain?: string }) => {
-  if (!data.domain) {
-    return;
-  }
-
-  // If the certificate already exists, don't generate it again
-  if (await pathExists(path.join(process.cwd(), 'traefik', 'tls', `${data.domain}.txt`))) {
-    fileLogger.info(`TLS certificate for ${data.domain} already exists`);
-    return;
-  }
-
-  // Remove old certificates
-  if (await pathExists(path.join(process.cwd(), 'traefik', 'tls', 'cert.pem'))) {
-    fileLogger.info('Removing old TLS certificate');
-    await fs.promises.unlink(path.join(process.cwd(), 'traefik', 'tls', 'cert.pem'));
-  }
-  if (await pathExists(path.join(process.cwd(), 'traefik', 'tls', 'key.pem'))) {
-    fileLogger.info('Removing old TLS key');
-    await fs.promises.unlink(path.join(process.cwd(), 'traefik', 'tls', 'key.pem'));
-  }
-
-  const subject = `/O=runtipi.io/OU=IT/CN=*.${data.domain}/emailAddress=webmaster@${data.domain}`;
-  const subjectAltName = `DNS:*.${data.domain},DNS:${data.domain}`;
-
-  try {
-    fileLogger.info(`Generating TLS certificate for ${data.domain}`);
-    await execAsync(`openssl req -x509 -newkey rsa:4096 -keyout traefik/tls/key.pem -out traefik/tls/cert.pem -days 365 -subj "${subject}" -addext "subjectAltName = ${subjectAltName}" -nodes`);
-    fileLogger.info(`Writing txt file for ${data.domain}`);
-    await fs.promises.writeFile(path.join(process.cwd(), 'traefik', 'tls', `${data.domain}.txt`), '');
-  } catch (error) {
-    fileLogger.error(error);
-    console.error(chalk.red('✗'), 'Failed to generate TLS certificates');
-  }
 };
@@ -3,28 +3,20 @@ import { program } from 'commander';
 import chalk from 'chalk';
 import { description, version } from '../package.json';
-import { startWorker } from './services/watcher/watcher';
-import { SystemExecutors } from './executors';
+import { AppExecutors, SystemExecutors } from './executors';

 const main = async () => {
   program.description(description).version(version);

-  program
-    .command('watch')
-    .description('Watcher script for events queue')
-    .action(async () => {
-      console.log('Starting watcher');
-      startWorker();
-    });
+  program.name('./runtipi-cli').usage('<command> [options]');

   program
     .command('start')
     .description('Start tipi')
-    .option('--no-permissions', 'Skip permissions check')
-    .option('--no-sudo', 'Skip sudo usage')
-    .action(async (options) => {
+    .addHelpText('after', '\nExample call: sudo ./runtipi-cli start')
+    .action(async () => {
       const systemExecutors = new SystemExecutors();
-      await systemExecutors.start(options.sudo);
+      await systemExecutors.start();
     });

   program

@@ -69,6 +61,26 @@ const main = async () => {
       await systemExecutors.cleanLogs();
     });

+  // Start app: ./cli app start <app>
+  // Stop app: ./cli app stop <app>
+  program
+    .command('app [command] <app>')
+    .addHelpText('after', '\nExample call: sudo ./runtipi-cli app start <app>')
+    .description('App management')
+    .action(async (command, app) => {
+      const appExecutors = new AppExecutors();
+      switch (command) {
+        case 'start':
+          await appExecutors.startApp(app);
+          break;
+        case 'stop':
+          await appExecutors.stopApp(app);
+          break;
+        default:
+          console.log(chalk.red('✗'), 'Unknown command');
+      }
+    });
+
   program.parse(process.argv);
 };
@@ -1,12 +0,0 @@
-/**
- * Returns the user id and group id of the current user
- */
-export const getUserIds = () => {
-  if (process.getgid && process.getuid) {
-    const isSudo = process.getgid() === 0 && process.getuid() === 0;
-
-    return { uid: process.getuid(), gid: process.getgid(), isSudo };
-  }
-
-  return { uid: 1000, gid: 1000, isSudo: false };
-};
packages/cli/src/utils/exec-async/execAsync.tsx (Normal file, 19 lines)

@@ -0,0 +1,19 @@
+import { exec } from 'child_process';
+import { promisify } from 'util';
+
+type ExecAsyncParams = [command: string];
+
+type ExecResult = { stdout: string; stderr: string };
+
+export const execAsync = async (...args: ExecAsyncParams): Promise<ExecResult> => {
+  try {
+    const { stdout, stderr } = await promisify(exec)(...args);
+    return { stdout, stderr };
+  } catch (error) {
+    if (error instanceof Error) {
+      return { stderr: error.message, stdout: '' };
+    }
+
+    return { stderr: String(error), stdout: '' };
+  }
+};
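A minimal usage sketch of the new execAsync wrapper added above (the caller and command are illustrative): failures are returned through stderr rather than thrown, so callers inspect the result instead of wrapping in try/catch.

```ts
import { execAsync } from './execAsync';

// Hypothetical caller: errors surface in stderr instead of throwing.
const main = async () => {
  const { stdout, stderr } = await execAsync('docker --version');
  if (stderr) {
    console.error('command failed:', stderr);
    return;
  }
  console.log('docker version:', stdout.trim());
};

main();
```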
@@ -1,4 +0,0 @@
-import { createLogger } from '@runtipi/shared';
-import path from 'path';
-
-export const fileLogger = createLogger('cli', path.join(process.cwd(), 'logs'));

packages/cli/src/utils/logger/logger.ts (Normal file, 4 lines)
@@ -0,0 +1,4 @@
+import { FileLogger } from '@runtipi/shared';
+import path from 'node:path';
+
+export const logger = new FileLogger('cli', path.join(process.cwd(), 'logs'));
@@ -7,7 +7,7 @@ export class TerminalSpinner {

   frame = 0;

-  interval: NodeJS.Timer | null = null;
+  interval: NodeJS.Timeout | null = null;

   start() {
     this.interval = setInterval(() => {
@@ -11,7 +11,7 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "winston": "^3.9.0",
-    "zod": "^3.21.4"
+    "winston": "^3.11.0",
+    "zod": "^3.22.4"
   }
 }
@@ -1 +1,2 @@
 export * from './env-helpers';
+export * from './fs-helpers';
@@ -1,3 +1,5 @@
 export * from './schemas';
 export * from './helpers';
 export { createLogger } from './utils/logger';
+export { FileLogger } from './lib/FileLogger';
+export { execAsync } from './lib/exec-async';

packages/shared/src/lib/FileLogger/FileLogger.ts (Normal file, 61 lines)
@@ -0,0 +1,61 @@
+import fs from 'fs';
+import path from 'path';
+import { createLogger } from '../../utils/logger';
+
+function streamLogToHistory(logsFolder: string, logFile: string) {
+  return new Promise((resolve, reject) => {
+    const appLogReadStream = fs.createReadStream(path.join(logsFolder, logFile), 'utf-8');
+    const appLogHistoryWriteStream = fs.createWriteStream(path.join(logsFolder, `${logFile}.history`), { flags: 'a' });
+
+    appLogReadStream
+      .pipe(appLogHistoryWriteStream)
+      .on('finish', () => {
+        fs.writeFileSync(path.join(logsFolder, logFile), '');
+        resolve(true);
+      })
+      .on('error', (error) => {
+        reject(error);
+      });
+  });
+}
+
+export class FileLogger {
+  private winstonLogger;
+
+  private logsFolder;
+
+  constructor(name: string, folder: string, console?: boolean) {
+    this.winstonLogger = createLogger(name, folder, console);
+    this.logsFolder = folder;
+  }
+
+  public flush = async () => {
+    try {
+      if (fs.existsSync(path.join(this.logsFolder, 'app.log'))) {
+        await streamLogToHistory(this.logsFolder, 'app.log');
+      }
+      if (fs.existsSync(path.join(this.logsFolder, 'error.log'))) {
+        await streamLogToHistory(this.logsFolder, 'error.log');
+      }
+      this.winstonLogger.info('Logs flushed');
+    } catch (error) {
+      this.winstonLogger.error('Error flushing logs', error);
+    }
+  };
+
+  public error = (...message: unknown[]) => {
+    this.winstonLogger.error(message.join(' '));
+  };
+
+  public info = (...message: unknown[]) => {
+    this.winstonLogger.info(message.join(' '));
+  };
+
+  public warn = (...message: unknown[]) => {
+    this.winstonLogger.warn(message.join(' '));
+  };
+
+  public debug = (...message: unknown[]) => {
+    this.winstonLogger.debug(message.join(' '));
+  };
+}

packages/shared/src/lib/FileLogger/index.ts (Normal file, 1 line)
@@ -0,0 +1 @@
+export { FileLogger } from './FileLogger';
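A small usage sketch of the new FileLogger (the logger name and folder are illustrative): arguments of any type are joined into a single log line, and flush() rotates the current app.log / error.log into .history files.

```ts
import path from 'node:path';
import { FileLogger } from '@runtipi/shared';

// Illustrative name and folder; real callers pass their own.
const logger = new FileLogger('example', path.join(process.cwd(), 'logs'), true);

logger.info('worker started on port', 3000);
logger.warn('disk usage above', 90, '%');

// flush() moves the current app.log / error.log into *.history files.
logger.flush().then(() => logger.debug('logs rotated'));
```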
packages/shared/src/lib/exec-async/execAsync.tsx (Normal file, 20 lines)
@@ -0,0 +1,20 @@
+import { exec } from 'child_process';
+import { promisify } from 'util';
+
+type ExecAsyncParams = [command: string];
+
+type ExecResult = { stdout: string; stderr: string };
+
+export const execAsync = async (...args: ExecAsyncParams): Promise<ExecResult> => {
+  try {
+    const { stdout, stderr } = await promisify(exec)(...args);
+
+    return { stdout, stderr };
+  } catch (error) {
+    if (error instanceof Error) {
+      return { stderr: error.message, stdout: '' };
+    }
+
+    return { stderr: String(error), stdout: '' };
+  }
+};

packages/shared/src/lib/exec-async/index.ts (Normal file, 1 line)
@@ -0,0 +1 @@
+export { execAsync } from './execAsync';
@@ -11,7 +11,6 @@ export const envSchema = z.object({
   NODE_ENV: z.union([z.literal('development'), z.literal('production'), z.literal('test')]),
   REDIS_HOST: z.string(),
   redisPassword: z.string(),
-  status: z.union([z.literal('RUNNING'), z.literal('UPDATING'), z.literal('RESTARTING')]),
   architecture: z.nativeEnum(ARCHITECTURES),
   dnsIp: z.string().ip().trim(),
   rootFolder: z.string(),

@@ -43,6 +42,14 @@ export const envSchema = z.object({
       if (typeof value === 'boolean') return value;
       return value === 'true';
     }),
+  guestDashboard: z
+    .string()
+    .or(z.boolean())
+    .optional()
+    .transform((value) => {
+      if (typeof value === 'boolean') return value;
+      return value === 'true';
+    }),
   seePreReleaseVersions: z
     .string()
     .or(z.boolean())

@@ -51,9 +58,17 @@ export const envSchema = z.object({
       if (typeof value === 'boolean') return value;
       return value === 'true';
     }),
+  allowAutoThemes: z
+    .string()
+    .or(z.boolean())
+    .optional()
+    .transform((value) => {
+      if (typeof value === 'boolean') return value;
+      return value === 'true';
+    }),
 });

 export const settingsSchema = envSchema
   .partial()
-  .pick({ dnsIp: true, internalIp: true, appsRepoUrl: true, domain: true, storagePath: true, localDomain: true, demoMode: true })
+  .pick({ dnsIp: true, internalIp: true, postgresPort: true, appsRepoUrl: true, domain: true, storagePath: true, localDomain: true, demoMode: true, guestDashboard: true, allowAutoThemes: true })
   .and(z.object({ port: z.number(), sslPort: z.number(), listenIp: z.string().ip().trim() }).partial());
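A quick standalone sketch of how the string-or-boolean transform used by the new guestDashboard and allowAutoThemes fields behaves (values are illustrative): env vars arrive as strings, settings.json can already hold booleans, and both normalize to a boolean.

```ts
import { z } from 'zod';

// Standalone sketch mirroring the guestDashboard / allowAutoThemes field shape.
const booleanish = z
  .string()
  .or(z.boolean())
  .optional()
  .transform((value) => {
    if (typeof value === 'boolean') return value;
    return value === 'true';
  });

console.log(booleanish.parse('true'));    // true  (value coming from a .env string)
console.log(booleanish.parse(false));     // false (value coming from settings.json)
console.log(booleanish.parse(undefined)); // false (unset)
```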
@@ -12,6 +12,7 @@ const appCommandSchema = z.object({
   type: z.literal(EVENT_TYPES.APP),
   command: z.union([z.literal('start'), z.literal('stop'), z.literal('install'), z.literal('uninstall'), z.literal('update'), z.literal('generate_env')]),
   appid: z.string(),
+  skipEnv: z.boolean().optional().default(false),
   form: z.object({}).catchall(z.any()),
 });

@@ -23,20 +24,14 @@ const repoCommandSchema = z.object({

 const systemCommandSchema = z.object({
   type: z.literal(EVENT_TYPES.SYSTEM),
-  command: z.union([z.literal('restart'), z.literal('system_info')]),
+  command: z.literal('system_info'),
 });

-const updateSchema = z.object({
-  type: z.literal(EVENT_TYPES.SYSTEM),
-  command: z.literal('update'),
-  version: z.string(),
-});
-
-export const eventSchema = appCommandSchema.or(repoCommandSchema).or(systemCommandSchema).or(updateSchema);
+export const eventSchema = appCommandSchema.or(repoCommandSchema).or(systemCommandSchema);

 export const eventResultSchema = z.object({
   success: z.boolean(),
   stdout: z.string(),
 });

-export type SystemEvent = z.infer<typeof eventSchema>;
+export type SystemEvent = z.input<typeof eventSchema>;
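A short sketch (literal values assumed for illustration, not the real EVENT_TYPES constants) of why SystemEvent switches to z.input: with skipEnv carrying a default, the input type lets producers omit the field while consumers always see it after parsing.

```ts
import { z } from 'zod';

// Standalone sketch mirroring the appCommandSchema shape from the diff.
const appCommand = z.object({
  type: z.literal('app'),
  command: z.literal('start'),
  appid: z.string(),
  skipEnv: z.boolean().optional().default(false),
  form: z.object({}).catchall(z.any()),
});

type AppCommandInput = z.input<typeof appCommand>;  // skipEnv?: boolean | undefined
type AppCommandOutput = z.infer<typeof appCommand>; // skipEnv: boolean

// Producers can enqueue events without skipEnv; consumers see it defaulted after parse().
const event: AppCommandInput = { type: 'app', command: 'start', appid: 'nginx', form: {} };
const parsed: AppCommandOutput = appCommand.parse(event);
console.log(parsed.skipEnv); // false
```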
@@ -12,7 +12,7 @@ type Transports = transports.ConsoleTransportInstance | transports.FileTransport
  * @param {string} id - The id of the logger, used to identify the logger in the logs
  * @param {string} logsFolder - The folder where the logs will be stored
  */
-export const newLogger = (id: string, logsFolder: string) => {
+export const newLogger = (id: string, logsFolder: string, console?: boolean) => {
   if (!fs.existsSync(logsFolder)) {
     fs.mkdirSync(logsFolder, { recursive: true });
   }

@@ -34,6 +34,12 @@ export const newLogger = (id: string, logsFolder: string) => {
     );
     exceptionHandlers = [new transports.File({ filename: path.join(logsFolder, 'error.log') })];

+  if (process.env.NODE_ENV === 'development') {
+    tr.push(new transports.Console({ level: 'debug' }));
+  } else if (console) {
+    tr.push(new transports.Console({ level: 'info' }));
+  }
+
   return createLogger({
     level: 'debug',
     format: combine(

@@ -41,7 +47,7 @@ export const newLogger = (id: string, logsFolder: string) => {
       colorize(),
       timestamp(),
       align(),
-      printf((info) => `${info.timestamp} - ${info.level} > ${info.message}`),
+      printf((info) => `${id}: ${info.timestamp} - ${info.level} > ${info.message}`),
     ),
     transports: tr,
     exceptionHandlers,
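A sketch of the new optional console flag on the shared logger factory, assuming the createLogger export from @runtipi/shared forwards to the function shown in this hunk: in development a debug console transport is always attached, otherwise console output is opt-in.

```ts
import path from 'node:path';
import { createLogger } from '@runtipi/shared';

// File-only logger (previous behaviour).
const quiet = createLogger('api', path.join(process.cwd(), 'logs'));

// File plus console output at info level, as the worker now requests.
const chatty = createLogger('worker', path.join(process.cwd(), 'logs'), true);

quiet.info('written to app.log only');
chatty.info('written to app.log and echoed to stdout');
```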
packages/worker/.env.test (Normal file, 14 lines)

@@ -0,0 +1,14 @@
+INTERNAL_IP=localhost
+ARCHITECTURE=arm64
+APPS_REPO_ID=repo-id
+APPS_REPO_URL=https://test.com/test
+ROOT_FOLDER_HOST=/runtipi
+STORAGE_PATH=/runtipi
+TIPI_VERSION=1
+REDIS_PASSWORD=redis
+REDIS_HOST=localhost
+POSTGRES_HOST=localhost
+POSTGRES_DBNAME=postgres
+POSTGRES_USERNAME=postgres
+POSTGRES_PASSWORD=postgres
+POSTGRES_PORT=5433

packages/worker/.eslintrc.js (Normal file, 39 lines)
@@ -0,0 +1,39 @@
+module.exports = {
+  root: true,
+  plugins: ['@typescript-eslint', 'import'],
+  extends: ['plugin:@typescript-eslint/recommended', 'airbnb', 'airbnb-typescript', 'eslint:recommended', 'plugin:import/typescript', 'prettier'],
+  parser: '@typescript-eslint/parser',
+  parserOptions: {
+    ecmaVersion: 'latest',
+    sourceType: 'module',
+    project: './tsconfig.json',
+    tsconfigRootDir: __dirname,
+  },
+  rules: {
+    'import/prefer-default-export': 0,
+    'class-methods-use-this': 0,
+    'import/extensions': [
+      'error',
+      'ignorePackages',
+      {
+        '': 'never',
+        js: 'never',
+        jsx: 'never',
+        ts: 'never',
+        tsx: 'never',
+      },
+    ],
+    'import/no-extraneous-dependencies': [
+      'error',
+      {
+        devDependencies: ['build.js', '**/*.test.{ts,tsx}', '**/mocks/**', '**/__mocks__/**', '**/*.setup.{ts,js}', '**/*.config.{ts,js}', '**/tests/**'],
+      },
+    ],
+    'arrow-body-style': 0,
+    'no-underscore-dangle': 0,
+    'no-console': 0,
+  },
+  globals: {
+    NodeJS: true,
+  },
+};

packages/worker/.gitignore (vendored, Normal file, 2 lines)
@@ -0,0 +1,2 @@
+dist/
+coverage/

packages/worker/.prettierrc.js (Normal file, 6 lines)

@@ -0,0 +1,6 @@
+module.exports = {
+  singleQuote: true,
+  semi: true,
+  trailingComma: 'all',
+  printWidth: 200,
+};

packages/worker/Dockerfile (Normal file, 67 lines)
@@ -0,0 +1,67 @@
+ARG NODE_VERSION="20.10"
+ARG ALPINE_VERSION="3.18"
+
+FROM node:${NODE_VERSION}-alpine${ALPINE_VERSION} AS node_base
+
+# ---- BUILDER BASE ----
+FROM node_base AS builder_base
+
+RUN npm install pnpm -g
+RUN apk add curl
+
+# ---- RUNNER BASE ----
+FROM node_base AS runner_base
+
+RUN apk add curl openssl git && rm -rf /var/cache/apk/*
+
+ARG NODE_ENV="production"
+
+# ---- BUILDER ----
+FROM builder_base AS builder
+
+WORKDIR /app
+
+ARG TARGETARCH
+ENV TARGETARCH=${TARGETARCH}
+
+RUN echo "Building for ${TARGETARCH}"
+
+RUN if [ "${TARGETARCH}" = "arm64" ]; then \
+  curl -L -o docker-binary "https://github.com/docker/compose/releases/download/v2.23.1/docker-compose-linux-aarch64"; \
+  elif [ "${TARGETARCH}" = "amd64" ]; then \
+  curl -L -o docker-binary "https://github.com/docker/compose/releases/download/v2.23.1/docker-compose-linux-x86_64"; \
+  else \
+  echo "Unsupported architecture"; \
+  fi
+
+RUN chmod +x docker-binary
+
+COPY ./pnpm-lock.yaml ./
+COPY ./pnpm-workspace.yaml ./
+COPY ./patches ./patches
+RUN pnpm fetch --no-scripts
+
+COPY ./packages ./packages
+
+RUN pnpm install -r --prefer-offline
+
+COPY ./packages/worker/build.js ./packages/worker/build.js
+COPY ./packages/worker/src ./packages/worker/src
+COPY ./packages/worker/package.json ./packages/worker/package.json
+COPY ./packages/worker/assets ./packages/worker/assets
+
+RUN pnpm -r build --filter @runtipi/worker
+
+# ---- RUNNER ----
+FROM runner_base AS app
+
+WORKDIR /app
+
+ENV NODE_ENV=production
+
+COPY --from=builder /app/packages/worker/dist .
+COPY --from=builder /app/packages/worker/assets ./assets
+COPY --from=builder /app/docker-binary /usr/local/bin/docker-compose
+
+CMD ["node", "index.js", "start"]

packages/worker/Dockerfile.dev (Normal file, 41 lines)
@@ -0,0 +1,41 @@
+ARG NODE_VERSION="20.10"
+ARG ALPINE_VERSION="3.18"
+
+FROM node:${NODE_VERSION}-alpine${ALPINE_VERSION} AS node_base
+
+# Install docker
+RUN apk upgrade --update-cache --available && \
+  apk add openssl git docker docker-cli-compose curl && \
+  rm -rf /var/cache/apk/*
+
+ARG TARGETARCH
+ENV TARGETARCH=${TARGETARCH}
+
+RUN echo "Building for ${TARGETARCH}"
+
+RUN if [ "${TARGETARCH}" = "arm64" ]; then \
+  curl -L -o docker-binary "https://github.com/docker/compose/releases/download/v2.23.1/docker-compose-linux-aarch64"; \
+  elif [ "${TARGETARCH}" = "amd64" ]; then \
+  curl -L -o docker-binary "https://github.com/docker/compose/releases/download/v2.23.1/docker-compose-linux-x86_64"; \
+  fi
+
+RUN chmod +x docker-binary
+
+RUN mv docker-binary /usr/local/bin/docker-compose
+
+RUN npm install pnpm -g
+
+WORKDIR /app
+
+COPY ./pnpm-lock.yaml ./
+COPY ./pnpm-workspace.yaml ./
+COPY ./patches ./patches
+RUN pnpm fetch --no-scripts
+
+COPY ./packages/worker/assets ./assets
+COPY ./packages ./packages
+
+RUN pnpm install -r --prefer-offline
+
+CMD ["pnpm", "--filter", "@runtipi/worker", "-r", "dev"]
@@ -0,0 +1,15 @@
+-- Update app table to add "is_visible_on_guest_dashboard" column
+ALTER TABLE "app"
+  ADD COLUMN IF NOT EXISTS "is_visible_on_guest_dashboard" boolean DEFAULT FALSE;
+
+-- Set default value to false
+UPDATE
+  "app"
+SET
+  "is_visible_on_guest_dashboard" = FALSE
+WHERE
+  "is_visible_on_guest_dashboard" IS NULL;
+
+-- Set is_visible_on_guest_dashboard column to not null constraint
+ALTER TABLE "app"
+  ALTER COLUMN "is_visible_on_guest_dashboard" SET NOT NULL;
@@ -4,7 +4,7 @@ api:

 providers:
   docker:
-    endpoint: "unix:///var/run/docker.sock"
+    endpoint: 'unix:///var/run/docker.sock'
     watch: true
     exposedByDefault: false
   file:

@@ -13,9 +13,9 @@ providers:

 entryPoints:
   web:
-    address: ":80"
+    address: ':80'
   websecure:
-    address: ":443"
+    address: ':443'
     http:
       tls:
         certResolver: myresolver

@@ -23,7 +23,7 @@ entryPoints:
 certificatesResolvers:
   myresolver:
     acme:
       email: acme@thisprops.com
       storage: /shared/acme.json
       httpChallenge:
         entryPoint: web

packages/worker/build.js (Normal file, 21 lines)
@@ -0,0 +1,21 @@
+const { build } = require('esbuild');
+
+const commandArgs = process.argv.slice(2);
+
+async function bundle() {
+  const start = Date.now();
+  const options = {
+    entryPoints: ['./src/index.ts'],
+    outfile: './dist/index.js',
+    platform: 'node',
+    target: 'node20',
+    bundle: true,
+    color: true,
+    sourcemap: commandArgs.includes('--sourcemap'),
+  };
+
+  await build({ ...options, minify: true });
+  console.log(`Build time: ${Date.now() - start}ms`);
+}
+
+bundle();

packages/worker/nodemon.json (Normal file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "watch": ["src"],
+  "exec": "NODE_ENV=development tsx ./src/index.ts",
+  "ext": "js ts"
+}

packages/worker/package.json (Normal file, 42 lines)
@@ -0,0 +1,42 @@
+{
+  "name": "@runtipi/worker",
+  "version": "1.0.0",
+  "description": "",
+  "main": "src/index.ts",
+  "scripts": {
+    "test": "dotenv -e .env.test vitest -- --coverage --watch=false",
+    "test:watch": "dotenv -e .env.test vitest",
+    "build": "node build.js",
+    "tsc": "tsc",
+    "dev": "dotenv -e ../../.env nodemon",
+    "knip": "knip",
+    "lint": "eslint . --ext .ts"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "devDependencies": {
+    "@faker-js/faker": "^8.2.0",
+    "@types/web-push": "^3.6.3",
+    "dotenv-cli": "^7.3.0",
+    "esbuild": "^0.19.4",
+    "knip": "^2.41.3",
+    "memfs": "^4.6.0",
+    "nodemon": "^3.0.1",
+    "tsx": "^3.14.0",
+    "typescript": "^5.2.2",
+    "vite-tsconfig-paths": "^4.2.1",
+    "vitest": "^0.34.6"
+  },
+  "dependencies": {
+    "@runtipi/postgres-migrations": "^5.3.0",
+    "@runtipi/shared": "workspace:^",
+    "bullmq": "^4.13.0",
+    "dotenv": "^16.3.1",
+    "ioredis": "^5.3.2",
+    "pg": "^8.11.3",
+    "systeminformation": "^5.21.15",
+    "web-push": "^3.6.6",
+    "zod": "^3.22.4"
+  }
+}

packages/worker/src/config/constants.ts (Normal file, 2 lines)
@@ -0,0 +1,2 @@
+export const ROOT_FOLDER = '/app';
+export const STORAGE_FOLDER = '/storage';

packages/worker/src/config/index.ts (Normal file, 1 line)

@@ -0,0 +1 @@
+export * from './constants';

packages/worker/src/index.ts (Normal file, 94 lines)
@@ -0,0 +1,94 @@
+import { SystemEvent } from '@runtipi/shared';
+import http from 'node:http';
+import path from 'node:path';
+import Redis from 'ioredis';
+import dotenv from 'dotenv';
+import { Queue } from 'bullmq';
+import { copySystemFiles, ensureFilePermissions, generateSystemEnvFile, generateTlsCertificates } from '@/lib/system';
+import { runPostgresMigrations } from '@/lib/migrations';
+import { startWorker } from './watcher/watcher';
+import { logger } from '@/lib/logger';
+import { AppExecutors } from './services';
+
+const rootFolder = '/app';
+const envFile = path.join(rootFolder, '.env');
+
+const main = async () => {
+  try {
+    await logger.flush();
+
+    logger.info('Copying system files...');
+    await copySystemFiles();
+
+    logger.info('Generating system env file...');
+    const envMap = await generateSystemEnvFile();
+
+    // Reload env variables after generating the env file
+    logger.info('Reloading env variables...');
+    dotenv.config({ path: envFile, override: true });
+
+    logger.info('Generating TLS certificates...');
+    await generateTlsCertificates({ domain: envMap.get('LOCAL_DOMAIN') });
+
+    logger.info('Ensuring file permissions...');
+    await ensureFilePermissions();
+
+    logger.info('Starting queue...');
+    const queue = new Queue('events', { connection: { host: envMap.get('REDIS_HOST'), port: 6379, password: envMap.get('REDIS_PASSWORD') } });
+    logger.info('Obliterating queue...');
+    await queue.obliterate({ force: true });
+
+    // Initial jobs
+    logger.info('Adding initial jobs to queue...');
+    await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent);
+    await queue.add(`${Math.random().toString()}_repo_clone`, { type: 'repo', command: 'clone', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
+    await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent);
+
+    // Scheduled jobs
+    logger.info('Adding scheduled jobs to queue...');
+    await queue.add(`${Math.random().toString()}_repo_update`, { type: 'repo', command: 'update', url: envMap.get('APPS_REPO_URL') } as SystemEvent, { repeat: { pattern: '*/30 * * * *' } });
+    await queue.add(`${Math.random().toString()}_system_info`, { type: 'system', command: 'system_info' } as SystemEvent, { repeat: { pattern: '* * * * *' } });
+
+    logger.info('Closing queue...');
+    await queue.close();
+
+    logger.info('Running database migrations...');
+    await runPostgresMigrations({
+      postgresHost: envMap.get('POSTGRES_HOST') as string,
+      postgresDatabase: envMap.get('POSTGRES_DBNAME') as string,
+      postgresUsername: envMap.get('POSTGRES_USERNAME') as string,
+      postgresPassword: envMap.get('POSTGRES_PASSWORD') as string,
+      postgresPort: envMap.get('POSTGRES_PORT') as string,
+    });
+
+    // Set status to running
+    logger.info('Setting status to running...');
+    const cache = new Redis({ host: envMap.get('REDIS_HOST'), port: 6379, password: envMap.get('REDIS_PASSWORD') });
+    await cache.set('status', 'RUNNING');
+    await cache.quit();
+
+    // Start all apps
+    const appExecutor = new AppExecutors();
+    logger.info('Starting all apps...');
+    appExecutor.startAllApps();
+
+    const server = http.createServer((req, res) => {
+      if (req.url === '/healthcheck') {
+        res.writeHead(200);
+        res.end('OK');
+      } else {
+        res.writeHead(404);
+        res.end('Not Found');
+      }
+    });
+
+    server.listen(3000, () => {
+      startWorker();
+    });
+  } catch (e) {
+    logger.error(e);
+    process.exit(1);
+  }
+};
+
+main();
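For reference, a tiny sketch of how another process could probe the /healthcheck endpoint the worker opens above (port 3000 as in the diff; the host name is illustrative and would normally be the container name).

```ts
import http from 'node:http';

// Probe the worker healthcheck; 'localhost' stands in for the real container hostname.
http
  .get('http://localhost:3000/healthcheck', (res) => {
    console.log(res.statusCode === 200 ? 'worker is up' : `unexpected status: ${res.statusCode}`);
  })
  .on('error', (err) => {
    console.error('worker not reachable:', err.message);
  });
```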
packages/worker/src/lib/docker/docker-helpers.test.ts (Normal file, 125 lines)
@@ -0,0 +1,125 @@
+// const spy = vi.spyOn(dockerHelpers, 'compose').mockImplementation(() => Promise.resolve({ stdout: '', stderr: randomError }));
+
+import { vi, it, describe, expect } from 'vitest';
+import { faker } from '@faker-js/faker';
+import fs from 'fs';
+import { compose } from './docker-helpers';
+
+const execAsync = vi.fn().mockImplementation(() => Promise.resolve({ stdout: '', stderr: '' }));
+
+vi.mock('@runtipi/shared', async (importOriginal) => {
+  const mod = (await importOriginal()) as object;
+
+  return {
+    ...mod,
+    FileLogger: vi.fn().mockImplementation(() => ({
+      flush: vi.fn(),
+      error: vi.fn(),
+      info: vi.fn(),
+      warn: vi.fn(),
+      debug: vi.fn(),
+    })),
+    execAsync: (cmd: string) => execAsync(cmd),
+  };
+});
+
+describe('docker helpers', async () => {
+  it('should call execAsync with correct args', async () => {
+    // arrange
+    const appId = faker.word.noun().toLowerCase();
+    const command = faker.word.noun().toLowerCase();
+
+    // act
+    await compose(appId, command);
+
+    // assert
+    const expected = [
+      'docker-compose',
+      `--env-file /storage/app-data/${appId}/app.env`,
+      `--project-name ${appId}`,
+      `-f /app/apps/${appId}/docker-compose.yml`,
+      '-f /app/repos/repo-id/apps/docker-compose.common.yml',
+      command,
+    ].join(' ');
+
+    expect(execAsync).toHaveBeenCalledWith(expected);
+  });
+
+  it('should add user env file if exists', async () => {
+    // arrange
+    const appId = faker.word.noun().toLowerCase();
+    const command = faker.word.noun().toLowerCase();
+    await fs.promises.mkdir(`/app/user-config/${appId}`, { recursive: true });
+    const userEnvFile = `/app/user-config/${appId}/app.env`;
+    await fs.promises.writeFile(userEnvFile, 'test');
+
+    // act
+    await compose(appId, command);
+
+    // assert
+    const expected = [
+      'docker-compose',
+      `--env-file /storage/app-data/${appId}/app.env`,
+      `--env-file ${userEnvFile}`,
+      `--project-name ${appId}`,
+      `-f /app/apps/${appId}/docker-compose.yml`,
+      '-f /app/repos/repo-id/apps/docker-compose.common.yml',
+      command,
+    ].join(' ');
+
+    expect(execAsync).toHaveBeenCalledWith(expected);
+  });
+
+  it('should add user compose file if exists', async () => {
+    // arrange
+    const appId = faker.word.noun().toLowerCase();
+    const command = faker.word.noun().toLowerCase();
+    await fs.promises.mkdir(`/app/user-config/${appId}`, { recursive: true });
+    const userComposeFile = `/app/user-config/${appId}/docker-compose.yml`;
+    await fs.promises.writeFile(userComposeFile, 'test');
+
+    // act
+    await compose(appId, command);
+
+    // assert
+    const expected = [
+      'docker-compose',
+      `--env-file /storage/app-data/${appId}/app.env`,
+      `--project-name ${appId}`,
+      `-f /app/apps/${appId}/docker-compose.yml`,
+      '-f /app/repos/repo-id/apps/docker-compose.common.yml',
+      `--file ${userComposeFile}`,
+      command,
+    ].join(' ');
+
+    expect(execAsync).toHaveBeenCalledWith(expected);
+  });
+
+  it('should add arm64 compose file if exists and arch is arm64', async () => {
+    // arrange
+    vi.mock('@/lib/environment', async (importOriginal) => {
+      const mod = (await importOriginal()) as object;
+      return { ...mod, getEnv: () => ({ arch: 'arm64', appsRepoId: 'repo-id' }) };
+    });
+    const appId = faker.word.noun().toLowerCase();
+    const command = faker.word.noun().toLowerCase();
+    await fs.promises.mkdir(`/app/apps/${appId}`, { recursive: true });
+    const arm64ComposeFile = `/app/apps/${appId}/docker-compose.arm64.yml`;
+    await fs.promises.writeFile(arm64ComposeFile, 'test');
+
+    // act
+    await compose(appId, command);
+
+    // assert
+    const expected = [
+      'docker-compose',
+      `--env-file /storage/app-data/${appId}/app.env`,
+      `--project-name ${appId}`,
+      `-f ${arm64ComposeFile}`,
+      `-f /app/repos/repo-id/apps/docker-compose.common.yml`,
+      command,
+    ].join(' ');
+
+    expect(execAsync).toHaveBeenCalledWith(expected);
+  });
+});
@@ -1,17 +1,15 @@
 import path from 'path';
-import { promisify } from 'util';
-import { exec } from 'child_process';
-import { getEnv } from '../environment/environment';
-import { pathExists } from '../fs-helpers/fs-helpers';
-import { fileLogger } from '../logger/file-logger';
-
-const execAsync = promisify(exec);
+import { execAsync, pathExists } from '@runtipi/shared';
+import { logger } from '@/lib/logger';
+import { getEnv } from '@/lib/environment';
+import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';

 const composeUp = async (args: string[]) => {
-  const { stdout, stderr } = await execAsync(`docker compose ${args.join(' ')}`);
+  logger.info(`Running docker compose with args ${args.join(' ')}`);
+  const { stdout, stderr } = await execAsync(`docker-compose ${args.join(' ')}`);

-  if (stderr) {
-    fileLogger.error(stderr);
+  if (stderr && stderr.includes('Command failed:')) {
+    throw new Error(stderr);
   }

   return { stdout, stderr };

@@ -23,14 +21,14 @@ const composeUp = async (args: string[]) => {
  * @param {string} command - Command to execute
  */
 export const compose = async (appId: string, command: string) => {
-  const { arch, rootFolderHost, appsRepoId, storagePath } = getEnv();
-  const appDataDirPath = path.join(storagePath, 'app-data', appId);
-  const appDirPath = path.join(rootFolderHost, 'apps', appId);
+  const { arch, appsRepoId } = getEnv();
+  const appDataDirPath = path.join(STORAGE_FOLDER, 'app-data', appId);
+  const appDirPath = path.join(ROOT_FOLDER, 'apps', appId);

   const args: string[] = [`--env-file ${path.join(appDataDirPath, 'app.env')}`];

   // User custom env file
-  const userEnvFile = path.join(rootFolderHost, 'user-config', appId, 'app.env');
+  const userEnvFile = path.join(ROOT_FOLDER, 'user-config', appId, 'app.env');
   if (await pathExists(userEnvFile)) {
     args.push(`--env-file ${userEnvFile}`);
   }

@@ -43,11 +41,11 @@ export const compose = async (appId: string, command: string) => {
   }
   args.push(`-f ${composeFile}`);

-  const commonComposeFile = path.join(rootFolderHost, 'repos', appsRepoId, 'apps', 'docker-compose.common.yml');
+  const commonComposeFile = path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', 'docker-compose.common.yml');
   args.push(`-f ${commonComposeFile}`);

   // User defined overrides
-  const userComposeFile = path.join(rootFolderHost, 'user-config', appId, 'docker-compose.yml');
+  const userComposeFile = path.join(ROOT_FOLDER, 'user-config', appId, 'docker-compose.yml');
   if (await pathExists(userComposeFile)) {
     args.push(`--file ${userComposeFile}`);
   }
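For orientation, a sketch of what the reworked compose() helper assembles for an app with no user overrides (the app id and command are illustrative; paths follow the new in-container ROOT_FOLDER and STORAGE_FOLDER constants, and the repo id comes from the environment).

```ts
import { compose } from './docker-helpers';

const restartApp = async (appId: string) => {
  // Roughly expands to:
  // docker-compose --env-file /storage/app-data/<appId>/app.env --project-name <appId>
  //   -f /app/apps/<appId>/docker-compose.yml -f /app/repos/<appsRepoId>/apps/docker-compose.common.yml restart
  await compose(appId, 'restart');
};

restartApp('nginx');
```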
packages/worker/src/lib/environment/environment.ts (Normal file, 62 lines)
@@ -0,0 +1,62 @@
+import { z } from 'zod';
+import dotenv from 'dotenv';
+
+if (process.env.NODE_ENV === 'development') {
+  dotenv.config({ path: '.env.dev', override: true });
+} else {
+  dotenv.config({ override: true });
+}
+
+const environmentSchema = z
+  .object({
+    STORAGE_PATH: z.string(),
+    ROOT_FOLDER_HOST: z.string(),
+    APPS_REPO_ID: z.string(),
+    ARCHITECTURE: z.enum(['arm64', 'amd64']),
+    INTERNAL_IP: z.string().ip().or(z.literal('localhost')),
+    TIPI_VERSION: z.string(),
+    REDIS_PASSWORD: z.string(),
+    REDIS_HOST: z.string(),
+    POSTGRES_PORT: z.string(),
+    POSTGRES_USERNAME: z.string(),
+    POSTGRES_PASSWORD: z.string(),
+    POSTGRES_DBNAME: z.string(),
+    POSTGRES_HOST: z.string(),
+  })
+  .transform((env) => {
+    const {
+      STORAGE_PATH = '/app',
+      ARCHITECTURE,
+      ROOT_FOLDER_HOST,
+      APPS_REPO_ID,
+      INTERNAL_IP,
+      TIPI_VERSION,
+      REDIS_PASSWORD,
+      REDIS_HOST,
+      POSTGRES_DBNAME,
+      POSTGRES_PASSWORD,
+      POSTGRES_USERNAME,
+      POSTGRES_PORT,
+      POSTGRES_HOST,
+      ...rest
+    } = env;
+
+    return {
+      storagePath: STORAGE_PATH,
+      rootFolderHost: ROOT_FOLDER_HOST,
+      appsRepoId: APPS_REPO_ID,
+      arch: ARCHITECTURE,
+      tipiVersion: TIPI_VERSION,
+      internalIp: INTERNAL_IP,
+      redisPassword: REDIS_PASSWORD,
+      redisHost: REDIS_HOST,
+      postgresPort: POSTGRES_PORT,
+      postgresUsername: POSTGRES_USERNAME,
+      postgresPassword: POSTGRES_PASSWORD,
+      postgresDatabase: POSTGRES_DBNAME,
+      postgresHost: POSTGRES_HOST,
+      ...rest,
+    };
+  });
+
+export const getEnv = () => environmentSchema.parse(process.env);
packages/worker/src/lib/environment/index.ts (Normal file, 1 line)

@@ -0,0 +1 @@
+export { getEnv } from './environment';
packages/worker/src/lib/logger/index.ts (Normal file, 1 line)

@@ -0,0 +1 @@
+export { logger } from './logger';

packages/worker/src/lib/logger/logger.ts (Normal file, 4 lines)

@@ -0,0 +1,4 @@
+import { FileLogger } from '@runtipi/shared';
+import path from 'node:path';
+
+export const logger = new FileLogger('worker', path.join('/app', 'logs'), true);

packages/worker/src/lib/migrations/index.ts (Normal file, 1 line)

@@ -0,0 +1 @@
+export { runPostgresMigrations } from './run-migration';
@@ -1,7 +1,8 @@
 import path from 'path';
 import pg from 'pg';
 import { migrate } from '@runtipi/postgres-migrations';
-import { fileLogger } from '../logger/file-logger';
+import { logger } from '@/lib/logger';
+import { ROOT_FOLDER } from '@/config/constants';

 type MigrationParams = {
   postgresHost: string;

@@ -12,13 +13,13 @@ type MigrationParams = {
 };

 export const runPostgresMigrations = async (params: MigrationParams) => {
-  const assetsFolder = path.join('/snapshot', 'runtipi', 'packages', 'cli', 'assets');
+  const assetsFolder = path.join(ROOT_FOLDER, 'assets');

   const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = params;

-  fileLogger.info('Starting database migration');
+  logger.info('Starting database migration');

-  fileLogger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);
+  logger.info(`Connecting to database ${postgresDatabase} on ${postgresHost} as ${postgresUsername} on port ${postgresPort}`);

   const client = new pg.Client({
     user: postgresUsername,

@@ -29,28 +30,28 @@ export const runPostgresMigrations = async (params: MigrationParams) => {
   });
   await client.connect();

-  fileLogger.info('Client connected');
+  logger.info('Client connected');

   try {
     const { rows } = await client.query('SELECT * FROM migrations');
     // if rows contains a migration with name 'Initial1657299198975' (legacy typeorm) delete table migrations. As all migrations are idempotent we can safely delete the table and start over.
     if (rows.find((row) => row.name === 'Initial1657299198975')) {
-      fileLogger.info('Found legacy migration. Deleting table migrations');
+      logger.info('Found legacy migration. Deleting table migrations');
       await client.query('DROP TABLE migrations');
     }
   } catch (e) {
-    fileLogger.info('Migrations table not found, creating it');
+    logger.info('Migrations table not found, creating it');
   }

-  fileLogger.info('Running migrations');
+  logger.info('Running migrations');
   try {
     await migrate({ client }, path.join(assetsFolder, 'migrations'), { skipCreateMigrationTable: true });
   } catch (e) {
-    fileLogger.error('Error running migrations. Dropping table migrations and trying again');
+    logger.error('Error running migrations. Dropping table migrations and trying again');
     await client.query('DROP TABLE migrations');
     await migrate({ client }, path.join(assetsFolder, 'migrations'), { skipCreateMigrationTable: true });
   }

-  fileLogger.info('Migration complete');
+  logger.info('Migration complete');
   await client.end();
 };
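A sketch of wiring runPostgresMigrations from the validated environment instead of the raw env map used by the worker entrypoint (field names follow MigrationParams and getEnv() as shown in this diff).

```ts
import { runPostgresMigrations } from '@/lib/migrations';
import { getEnv } from '@/lib/environment';

// Same wiring the worker entrypoint performs, but fed from getEnv().
const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getEnv();

runPostgresMigrations({ postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort }).catch((err) => {
  console.error('migrations failed', err);
  process.exit(1);
});
```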
packages/worker/src/lib/system/index.ts (Normal file, 1 line)

@@ -0,0 +1 @@
+export { copySystemFiles, generateSystemEnvFile, ensureFilePermissions, generateTlsCertificates } from './system.helpers';

packages/worker/src/lib/system/system.helpers.ts (Normal file, 278 lines)
@ -0,0 +1,278 @@
|
||||||
|
/* eslint-disable no-await-in-loop */
|
||||||
|
/* eslint-disable no-restricted-syntax */
|
||||||
|
import crypto from 'crypto';
|
||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import os from 'os';
|
||||||
|
import { envMapToString, envStringToMap, execAsync, pathExists, settingsSchema } from '@runtipi/shared';
|
||||||
|
import { logger } from '../logger/logger';
|
||||||
|
import { getRepoHash } from '../../services/repo/repo.helpers';
|
||||||
|
import { ROOT_FOLDER } from '@/config/constants';
|
||||||
|
|
||||||
|
type EnvKeys =
|
||||||
|
| 'APPS_REPO_ID'
|
||||||
|
| 'APPS_REPO_URL'
|
||||||
|
| 'TZ'
|
||||||
|
| 'INTERNAL_IP'
|
||||||
|
| 'DNS_IP'
|
||||||
|
| 'ARCHITECTURE'
|
||||||
|
| 'TIPI_VERSION'
|
||||||
|
| 'JWT_SECRET'
|
||||||
|
| 'ROOT_FOLDER_HOST'
|
||||||
|
| 'NGINX_PORT'
|
||||||
|
| 'NGINX_PORT_SSL'
|
||||||
|
| 'DOMAIN'
|
||||||
|
| 'STORAGE_PATH'
|
||||||
|
| 'POSTGRES_PORT'
|
||||||
|
| 'POSTGRES_HOST'
|
||||||
|
| 'POSTGRES_DBNAME'
|
||||||
|
| 'POSTGRES_PASSWORD'
|
||||||
|
| 'POSTGRES_USERNAME'
|
||||||
|
| 'REDIS_HOST'
|
||||||
|
| 'REDIS_PASSWORD'
|
||||||
|
| 'LOCAL_DOMAIN'
|
||||||
|
| 'DEMO_MODE'
|
||||||
|
| 'GUEST_DASHBOARD'
|
||||||
|
| 'TIPI_GID'
|
||||||
|
| 'TIPI_UID'
|
||||||
|
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||||
|
| (string & {});
|
||||||
|
|
||||||
|
const OLD_DEFAULT_REPO_URL = 'https://github.com/meienberger/runtipi-appstore';
|
||||||
|
const DEFAULT_REPO_URL = 'https://github.com/runtipi/runtipi-appstore';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reads and returns the generated seed
|
||||||
|
*/
|
||||||
|
const getSeed = async () => {
|
||||||
|
const seedFilePath = path.join(ROOT_FOLDER, 'state', 'seed');
|
||||||
|
|
||||||
|
if (!(await pathExists(seedFilePath))) {
|
||||||
|
throw new Error('Seed file not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
const seed = await fs.promises.readFile(seedFilePath, 'utf-8');
|
||||||
|
|
||||||
|
return seed;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Derives a new entropy value from the provided entropy and the seed
|
||||||
|
* @param {string} entropy - The entropy value to derive from
|
||||||
|
*/
|
||||||
|
const deriveEntropy = async (entropy: string) => {
|
||||||
|
const seed = await getSeed();
|
||||||
|
const hmac = crypto.createHmac('sha256', seed);
|
||||||
|
hmac.update(entropy);
|
||||||
|
|
||||||
|
return hmac.digest('hex');
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a random seed if it does not exist yet
|
||||||
|
*/
|
||||||
|
const generateSeed = async () => {
|
||||||
|
if (!(await pathExists(path.join(ROOT_FOLDER, 'state', 'seed')))) {
|
||||||
|
const randomBytes = crypto.randomBytes(32);
|
||||||
|
const seed = randomBytes.toString('hex');
|
||||||
|
|
||||||
|
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'state', 'seed'), seed);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the architecture of the current system
|
||||||
|
*/
|
||||||
|
const getArchitecture = () => {
|
||||||
|
const arch = os.arch();
|
||||||
|
|
||||||
|
if (arch === 'arm64') return 'arm64';
|
||||||
|
if (arch === 'x64') return 'amd64';
|
||||||
|
|
||||||
|
throw new Error(`Unsupported architecture: ${arch}`);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a valid .env file from the settings.json file
|
||||||
|
*/
|
||||||
|
export const generateSystemEnvFile = async () => {
|
||||||
|
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'state'), { recursive: true });
|
||||||
|
const settingsFilePath = path.join(ROOT_FOLDER, 'state', 'settings.json');
|
||||||
|
const envFilePath = path.join(ROOT_FOLDER, '.env');
|
||||||
|
|
||||||
|
if (!(await pathExists(envFilePath))) {
|
||||||
|
await fs.promises.writeFile(envFilePath, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
const envFile = await fs.promises.readFile(envFilePath, 'utf-8');
|
||||||
|
|
||||||
|
const envMap: Map<EnvKeys, string> = envStringToMap(envFile);
|
||||||
|
|
||||||
|
if (!(await pathExists(settingsFilePath))) {
|
||||||
|
await fs.promises.writeFile(settingsFilePath, JSON.stringify({}));
|
||||||
|
}
|
||||||
|
|
||||||
|
const settingsFile = await fs.promises.readFile(settingsFilePath, 'utf-8');
|
||||||
|
|
||||||
|
const settings = settingsSchema.safeParse(JSON.parse(settingsFile));
|
||||||
|
|
||||||
|
if (!settings.success) {
|
||||||
|
throw new Error(`Invalid settings.json file: ${settings.error.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await generateSeed();
|
||||||
|
|
||||||
|
const { data } = settings;
|
||||||
|
|
||||||
|
if (data.appsRepoUrl === OLD_DEFAULT_REPO_URL) {
|
||||||
|
data.appsRepoUrl = DEFAULT_REPO_URL;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jwtSecret = envMap.get('JWT_SECRET') || (await deriveEntropy('jwt_secret'));
|
||||||
|
const repoId = getRepoHash(data.appsRepoUrl || DEFAULT_REPO_URL);
|
||||||
|
|
||||||
|
const rootFolderHost = envMap.get('ROOT_FOLDER_HOST');
|
||||||
|
const internalIp = envMap.get('INTERNAL_IP');
|
||||||
|
|
||||||
|
if (!rootFolderHost) {
|
||||||
|
throw new Error('ROOT_FOLDER_HOST not set in .env file');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!internalIp) {
|
||||||
|
throw new Error('INTERNAL_IP not set in .env file');
|
||||||
|
}
|
||||||
|
|
||||||
|
envMap.set('APPS_REPO_ID', repoId);
|
||||||
|
envMap.set('APPS_REPO_URL', data.appsRepoUrl || DEFAULT_REPO_URL);
|
||||||
|
envMap.set('TZ', Intl.DateTimeFormat().resolvedOptions().timeZone);
|
||||||
|
envMap.set('INTERNAL_IP', data.listenIp || internalIp);
|
||||||
|
envMap.set('DNS_IP', data.dnsIp || '9.9.9.9');
|
||||||
|
envMap.set('ARCHITECTURE', getArchitecture());
|
||||||
|
envMap.set('JWT_SECRET', jwtSecret);
|
||||||
|
envMap.set('DOMAIN', data.domain || 'example.com');
|
||||||
|
envMap.set('STORAGE_PATH', data.storagePath || envMap.get('STORAGE_PATH') || rootFolderHost);
|
||||||
|
envMap.set('POSTGRES_HOST', 'tipi-db');
|
||||||
|
envMap.set('POSTGRES_DBNAME', 'tipi');
|
||||||
|
envMap.set('POSTGRES_USERNAME', 'tipi');
|
||||||
|
envMap.set('POSTGRES_PORT', String(5432));
|
||||||
|
envMap.set('REDIS_HOST', 'tipi-redis');
|
||||||
|
envMap.set('DEMO_MODE', String(data.demoMode || 'false'));
|
||||||
|
envMap.set('GUEST_DASHBOARD', String(data.guestDashboard || 'false'));
|
||||||
|
envMap.set('LOCAL_DOMAIN', data.localDomain || 'tipi.lan');
|
||||||
|
envMap.set('NODE_ENV', 'production');
|
||||||
|
|
||||||
|
await fs.promises.writeFile(envFilePath, envMapToString(envMap));
|
||||||
|
|
||||||
|
return envMap;
|
||||||
|
};

/**
 * Copies the system files from the assets folder to the current working directory
 */
export const copySystemFiles = async () => {
  // Remove old unused files
  if (await pathExists(path.join(ROOT_FOLDER, 'scripts'))) {
    logger.info('Removing old scripts folder');
    await fs.promises.rmdir(path.join(ROOT_FOLDER, 'scripts'), { recursive: true });
  }

  const assetsFolder = path.join(ROOT_FOLDER, 'assets');

  // Copy traefik folder from assets
  logger.info('Creating traefik folders');
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'traefik', 'dynamic'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'traefik', 'shared'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'traefik', 'tls'), { recursive: true });

  logger.info('Copying traefik files');
  await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'traefik.yml'), path.join(ROOT_FOLDER, 'traefik', 'traefik.yml'));
  await fs.promises.copyFile(path.join(assetsFolder, 'traefik', 'dynamic', 'dynamic.yml'), path.join(ROOT_FOLDER, 'traefik', 'dynamic', 'dynamic.yml'));

  // Create base folders
  logger.info('Creating base folders');
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'apps'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'app-data'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'state'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'repos'), { recursive: true });

  // Create media folders (see the sketch after this function for a more compact form)
  logger.info('Creating media folders');
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'torrents', 'watch'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'torrents', 'complete'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'torrents', 'incomplete'), { recursive: true });

  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'usenet', 'watch'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'usenet', 'complete'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'usenet', 'incomplete'), { recursive: true });

  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'downloads', 'watch'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'downloads', 'complete'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'downloads', 'incomplete'), { recursive: true });

  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'books'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'comics'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'movies'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'music'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'tv'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'podcasts'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'images'), { recursive: true });
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', 'data', 'roms'), { recursive: true });
};
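The long run of media `mkdir` calls could equally be driven from a list. The sketch below is purely illustrative (not part of this PR), builds the same folders with the same `{ recursive: true }` behaviour, and assumes it runs inside `copySystemFiles` with the same `fs`, `path` and `ROOT_FOLDER` in scope:

// Illustrative only: creates the same media tree as the explicit calls above.
const mediaFolders: string[][] = [
  ['torrents', 'watch'], ['torrents', 'complete'], ['torrents', 'incomplete'],
  ['usenet', 'watch'], ['usenet', 'complete'], ['usenet', 'incomplete'],
  ['downloads', 'watch'], ['downloads', 'complete'], ['downloads', 'incomplete'],
  ['data', 'books'], ['data', 'comics'], ['data', 'movies'], ['data', 'music'],
  ['data', 'tv'], ['data', 'podcasts'], ['data', 'images'], ['data', 'roms'],
];

await Promise.all(
  mediaFolders.map((parts) => fs.promises.mkdir(path.join(ROOT_FOLDER, 'media', ...parts), { recursive: true })),
);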

/**
 * Given a domain, generates the TLS certificates for it to be used with Traefik
 *
 * @param {string} data.domain The domain to generate the certificates for
 */
export const generateTlsCertificates = async (data: { domain?: string }) => {
  if (!data.domain) {
    return;
  }

  // If the certificate already exists, don't generate it again
  if (await pathExists(path.join(ROOT_FOLDER, 'traefik', 'tls', `${data.domain}.txt`))) {
    logger.info(`TLS certificate for ${data.domain} already exists`);
    return;
  }

  // Remove old certificates
  if (await pathExists(path.join(ROOT_FOLDER, 'traefik', 'tls', 'cert.pem'))) {
    logger.info('Removing old TLS certificate');
    await fs.promises.unlink(path.join(ROOT_FOLDER, 'traefik', 'tls', 'cert.pem'));
  }
  if (await pathExists(path.join(ROOT_FOLDER, 'traefik', 'tls', 'key.pem'))) {
    logger.info('Removing old TLS key');
    await fs.promises.unlink(path.join(ROOT_FOLDER, 'traefik', 'tls', 'key.pem'));
  }

  const subject = `/O=runtipi.io/OU=IT/CN=*.${data.domain}/emailAddress=webmaster@${data.domain}`;
  const subjectAltName = `DNS:*.${data.domain},DNS:${data.domain}`;

  try {
    logger.info(`Generating TLS certificate for ${data.domain}`);
    await execAsync(`openssl req -x509 -newkey rsa:4096 -keyout traefik/tls/key.pem -out traefik/tls/cert.pem -days 365 -subj "${subject}" -addext "subjectAltName = ${subjectAltName}" -nodes`);
    logger.info(`Writing txt file for ${data.domain}`);
    await fs.promises.writeFile(path.join(ROOT_FOLDER, 'traefik', 'tls', `${data.domain}.txt`), '');
  } catch (error) {
    logger.error(error);
  }
};
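If the self-signed certificate ever needs to be inspected, the standard openssl CLI can print its subject and expiry. The helper below is illustrative only (not part of the codebase) and assumes the same `execAsync`, `logger`, `path` and `ROOT_FOLDER` used above, with `execAsync` resolving to the usual `{ stdout, stderr }` of a promisified `child_process.exec`:

// Illustrative check: print subject and expiry of the generated certificate.
const verifyTlsCertificate = async () => {
  const certPath = path.join(ROOT_FOLDER, 'traefik', 'tls', 'cert.pem');
  const { stdout } = await execAsync(`openssl x509 -in ${certPath} -noout -subject -enddate`);
  logger.info(`Certificate details:\n${stdout}`);
};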

export const ensureFilePermissions = async () => {
  const filesAndFolders = [path.join(ROOT_FOLDER, 'state'), path.join(ROOT_FOLDER, 'traefik')];

  const files600 = [path.join(ROOT_FOLDER, 'traefik', 'shared', 'acme.json')];

  // Give permission to read and write to all files and folders for the current user
  for (const fileOrFolder of filesAndFolders) {
    if (await pathExists(fileOrFolder)) {
      await execAsync(`chmod -R a+rwx ${fileOrFolder}`).catch(() => {});
    }
  }

  for (const fileOrFolder of files600) {
    if (await pathExists(fileOrFolder)) {
      await execAsync(`chmod 600 ${fileOrFolder}`).catch(() => {});
    }
  }
};
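For reference, the two modes differ in scope: `a+rwx` recursively grants read/write/execute to everyone, while `600` restricts a file to its owner, which is what Traefik expects for `acme.json`. For a single file the same effect is available without shelling out; a small sketch using Node's built-in fs API (illustrative, not the code in this PR, reusing `path`, `pathExists` and `ROOT_FOLDER` from above):

// Equivalent of `chmod 600 acme.json` using fs.promises.chmod.
const restrictAcmeFile = async () => {
  const acmeJson = path.join(ROOT_FOLDER, 'traefik', 'shared', 'acme.json');
  if (await pathExists(acmeJson)) {
    await fs.promises.chmod(acmeJson, 0o600); // owner read/write only
  }
};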
@ -2,13 +2,14 @@ import fs from 'fs';
|
||||||
import { describe, it, expect, vi } from 'vitest';
|
import { describe, it, expect, vi } from 'vitest';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { faker } from '@faker-js/faker';
|
import { faker } from '@faker-js/faker';
|
||||||
|
import { pathExists } from '@runtipi/shared';
|
||||||
import { AppExecutors } from '../app.executors';
|
import { AppExecutors } from '../app.executors';
|
||||||
import { createAppConfig } from '@/tests/apps.factory';
|
import { createAppConfig } from '@/tests/apps.factory';
|
||||||
import * as dockerHelpers from '@/utils/docker-helpers';
|
import * as dockerHelpers from '@/lib/docker';
|
||||||
import { getEnv } from '@/utils/environment/environment';
|
import { getEnv } from '@/lib/environment';
|
||||||
import { pathExists } from '@/utils/fs-helpers';
|
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||||
|
|
||||||
const { storagePath, rootFolderHost, appsRepoId } = getEnv();
|
const { appsRepoId } = getEnv();
|
||||||
|
|
||||||
describe('test: app executors', () => {
|
describe('test: app executors', () => {
|
||||||
const appExecutors = new AppExecutors();
|
const appExecutors = new AppExecutors();
|
||||||
|
@ -23,7 +24,7 @@ describe('test: app executors', () => {
|
||||||
const { message, success } = await appExecutors.installApp(config.id, config);
|
const { message, success } = await appExecutors.installApp(config.id, config);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const envExists = await pathExists(path.join(storagePath, 'app-data', config.id, 'app.env'));
|
const envExists = await pathExists(path.join(STORAGE_FOLDER, 'app-data', config.id, 'app.env'));
|
||||||
|
|
||||||
expect(success).toBe(true);
|
expect(success).toBe(true);
|
||||||
expect(message).toBe(`App ${config.id} installed successfully`);
|
expect(message).toBe(`App ${config.id} installed successfully`);
|
||||||
|
@ -32,17 +33,34 @@ describe('test: app executors', () => {
|
||||||
spy.mockRestore();
|
spy.mockRestore();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should return error if compose script fails', async () => {
|
||||||
|
// arrange
|
||||||
|
const randomError = faker.system.fileName();
|
||||||
|
const spy = vi.spyOn(dockerHelpers, 'compose').mockImplementation(() => {
|
||||||
|
throw new Error(randomError);
|
||||||
|
});
|
||||||
|
const config = createAppConfig({}, false);
|
||||||
|
|
||||||
|
// act
|
||||||
|
const { message, success } = await appExecutors.installApp(config.id, config);
|
||||||
|
|
||||||
|
// assert
|
||||||
|
expect(success).toBe(false);
|
||||||
|
expect(message).toContain(randomError);
|
||||||
|
spy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
it('should delete existing app folder', async () => {
|
it('should delete existing app folder', async () => {
|
||||||
// arrange
|
// arrange
|
||||||
const config = createAppConfig();
|
const config = createAppConfig();
|
||||||
await fs.promises.mkdir(path.join(rootFolderHost, 'apps', config.id), { recursive: true });
|
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'apps', config.id), { recursive: true });
|
||||||
await fs.promises.writeFile(path.join(rootFolderHost, 'apps', config.id, 'test.txt'), 'test');
|
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'apps', config.id, 'test.txt'), 'test');
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await appExecutors.installApp(config.id, config);
|
await appExecutors.installApp(config.id, config);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const exists = await pathExists(path.join(storagePath, 'apps', config.id, 'test.txt'));
|
const exists = await pathExists(path.join(STORAGE_FOLDER, 'apps', config.id, 'test.txt'));
|
||||||
|
|
||||||
expect(exists).toBe(false);
|
expect(exists).toBe(false);
|
||||||
});
|
});
|
||||||
|
@ -51,13 +69,13 @@ describe('test: app executors', () => {
|
||||||
// arrange
|
// arrange
|
||||||
const config = createAppConfig();
|
const config = createAppConfig();
|
||||||
const filename = faker.system.fileName();
|
const filename = faker.system.fileName();
|
||||||
await fs.promises.writeFile(path.join(storagePath, 'app-data', config.id, filename), 'test');
|
await fs.promises.writeFile(path.join(STORAGE_FOLDER, 'app-data', config.id, filename), 'test');
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await appExecutors.installApp(config.id, config);
|
await appExecutors.installApp(config.id, config);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const exists = await pathExists(path.join(storagePath, 'app-data', config.id, filename));
|
const exists = await pathExists(path.join(STORAGE_FOLDER, 'app-data', config.id, filename));
|
||||||
|
|
||||||
expect(exists).toBe(true);
|
expect(exists).toBe(true);
|
||||||
});
|
});
|
||||||
|
@ -66,15 +84,15 @@ describe('test: app executors', () => {
|
||||||
// arrange
|
// arrange
|
||||||
const config = createAppConfig({}, false);
|
const config = createAppConfig({}, false);
|
||||||
const filename = faker.system.fileName();
|
const filename = faker.system.fileName();
|
||||||
await fs.promises.mkdir(path.join(rootFolderHost, 'repos', appsRepoId, 'apps', config.id, 'data'), { recursive: true });
|
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', config.id, 'data'), { recursive: true });
|
||||||
await fs.promises.writeFile(path.join(rootFolderHost, 'repos', appsRepoId, 'apps', config.id, 'data', filename), 'test');
|
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', config.id, 'data', filename), 'test');
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await appExecutors.installApp(config.id, config);
|
await appExecutors.installApp(config.id, config);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const exists = await pathExists(path.join(storagePath, 'app-data', config.id, 'data', filename));
|
const exists = await pathExists(path.join(STORAGE_FOLDER, 'app-data', config.id, 'data', filename));
|
||||||
const data = await fs.promises.readFile(path.join(storagePath, 'app-data', config.id, 'data', filename), 'utf-8');
|
const data = await fs.promises.readFile(path.join(STORAGE_FOLDER, 'app-data', config.id, 'data', filename), 'utf-8');
|
||||||
|
|
||||||
expect(exists).toBe(true);
|
expect(exists).toBe(true);
|
||||||
expect(data).toBe('test');
|
expect(data).toBe('test');
|
||||||
|
@ -84,16 +102,16 @@ describe('test: app executors', () => {
|
||||||
// arrange
|
// arrange
|
||||||
const config = createAppConfig();
|
const config = createAppConfig();
|
||||||
const filename = faker.system.fileName();
|
const filename = faker.system.fileName();
|
||||||
await fs.promises.writeFile(path.join(storagePath, 'app-data', config.id, 'data', filename), 'test');
|
await fs.promises.writeFile(path.join(STORAGE_FOLDER, 'app-data', config.id, 'data', filename), 'test');
|
||||||
await fs.promises.mkdir(path.join(rootFolderHost, 'repos', appsRepoId, 'apps', config.id, 'data'), { recursive: true });
|
await fs.promises.mkdir(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', config.id, 'data'), { recursive: true });
|
||||||
await fs.promises.writeFile(path.join(rootFolderHost, 'repos', appsRepoId, 'apps', config.id, 'data', filename), 'yeah');
|
await fs.promises.writeFile(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', config.id, 'data', filename), 'yeah');
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await appExecutors.installApp(config.id, config);
|
await appExecutors.installApp(config.id, config);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const exists = await pathExists(path.join(storagePath, 'app-data', config.id, 'data', filename));
|
const exists = await pathExists(path.join(STORAGE_FOLDER, 'app-data', config.id, 'data', filename));
|
||||||
const data = await fs.promises.readFile(path.join(storagePath, 'app-data', config.id, 'data', filename), 'utf-8');
|
const data = await fs.promises.readFile(path.join(STORAGE_FOLDER, 'app-data', config.id, 'data', filename), 'utf-8');
|
||||||
|
|
||||||
expect(exists).toBe(true);
|
expect(exists).toBe(true);
|
||||||
expect(data).toBe('test');
|
expect(data).toBe('test');
|
|
@ -1,20 +1,18 @@
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import { describe, it, expect } from 'vitest';
|
import { describe, it, expect } from 'vitest';
|
||||||
import { faker } from '@faker-js/faker';
|
import { faker } from '@faker-js/faker';
|
||||||
|
import { pathExists } from '@runtipi/shared';
|
||||||
import { copyDataDir, generateEnvFile } from '../app.helpers';
|
import { copyDataDir, generateEnvFile } from '../app.helpers';
|
||||||
import { createAppConfig } from '@/tests/apps.factory';
|
import { createAppConfig } from '@/tests/apps.factory';
|
||||||
import { getAppEnvMap } from '../env.helpers';
|
import { getAppEnvMap } from '../env.helpers';
|
||||||
import { getEnv } from '@/utils/environment/environment';
|
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||||
import { pathExists } from '@/utils/fs-helpers';
|
|
||||||
|
|
||||||
const { rootFolderHost, storagePath } = getEnv();
|
|
||||||
|
|
||||||
describe('app helpers', () => {
|
describe('app helpers', () => {
|
||||||
describe('Test: generateEnvFile()', () => {
|
describe('Test: generateEnvFile()', () => {
|
||||||
it('should throw an error if the app has an invalid config.json file', async () => {
|
it('should throw an error if the app has an invalid config.json file', async () => {
|
||||||
// arrange
|
// arrange
|
||||||
const appConfig = createAppConfig();
|
const appConfig = createAppConfig();
|
||||||
await fs.promises.writeFile(`${rootFolderHost}/apps/${appConfig.id}/config.json`, '{}');
|
await fs.promises.writeFile(`${ROOT_FOLDER}/apps/${appConfig.id}/config.json`, '{}');
|
||||||
|
|
||||||
// act & assert
|
// act & assert
|
||||||
expect(generateEnvFile(appConfig.id, {})).rejects.toThrowError(`App ${appConfig.id} has invalid config.json file`);
|
expect(generateEnvFile(appConfig.id, {})).rejects.toThrowError(`App ${appConfig.id} has invalid config.json file`);
|
||||||
|
@ -50,8 +48,8 @@ describe('app helpers', () => {
|
||||||
// arrange
|
// arrange
|
||||||
const appConfig = createAppConfig({ form_fields: [{ env_variable: 'RANDOM_FIELD', type: 'random', label: 'test', min: 32, max: 32, required: true }] });
|
const appConfig = createAppConfig({ form_fields: [{ env_variable: 'RANDOM_FIELD', type: 'random', label: 'test', min: 32, max: 32, required: true }] });
|
||||||
const randomField = faker.string.alphanumeric(32);
|
const randomField = faker.string.alphanumeric(32);
|
||||||
await fs.promises.mkdir(`${rootFolderHost}/app-data/${appConfig.id}`, { recursive: true });
|
await fs.promises.mkdir(`${STORAGE_FOLDER}/app-data/${appConfig.id}`, { recursive: true });
|
||||||
await fs.promises.writeFile(`${rootFolderHost}/app-data/${appConfig.id}/app.env`, `RANDOM_FIELD=${randomField}`);
|
await fs.promises.writeFile(`${STORAGE_FOLDER}/app-data/${appConfig.id}/app.env`, `RANDOM_FIELD=${randomField}`);
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await generateEnvFile(appConfig.id, {});
|
await generateEnvFile(appConfig.id, {});
|
||||||
|
@ -117,7 +115,7 @@ describe('app helpers', () => {
|
||||||
it('Should not re-create app-data folder if it already exists', async () => {
|
it('Should not re-create app-data folder if it already exists', async () => {
|
||||||
// arrange
|
// arrange
|
||||||
const appConfig = createAppConfig({});
|
const appConfig = createAppConfig({});
|
||||||
await fs.promises.mkdir(`${rootFolderHost}/app-data/${appConfig.id}`, { recursive: true });
|
await fs.promises.mkdir(`${ROOT_FOLDER}/app-data/${appConfig.id}`, { recursive: true });
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await generateEnvFile(appConfig.id, {});
|
await generateEnvFile(appConfig.id, {});
|
||||||
|
@ -161,8 +159,8 @@ describe('app helpers', () => {
|
||||||
const vapidPublicKey = faker.string.alphanumeric(32);
|
const vapidPublicKey = faker.string.alphanumeric(32);
|
||||||
|
|
||||||
// act
|
// act
|
||||||
await fs.promises.mkdir(`${rootFolderHost}/app-data/${appConfig.id}`, { recursive: true });
|
await fs.promises.mkdir(`${STORAGE_FOLDER}/app-data/${appConfig.id}`, { recursive: true });
|
||||||
await fs.promises.writeFile(`${rootFolderHost}/app-data/${appConfig.id}/app.env`, `VAPID_PRIVATE_KEY=${vapidPrivateKey}\nVAPID_PUBLIC_KEY=${vapidPublicKey}`);
|
await fs.promises.writeFile(`${STORAGE_FOLDER}/app-data/${appConfig.id}/app.env`, `VAPID_PRIVATE_KEY=${vapidPrivateKey}\nVAPID_PUBLIC_KEY=${vapidPublicKey}`);
|
||||||
await generateEnvFile(appConfig.id, {});
|
await generateEnvFile(appConfig.id, {});
|
||||||
const envmap = await getAppEnvMap(appConfig.id);
|
const envmap = await getAppEnvMap(appConfig.id);
|
||||||
|
|
||||||
|
@ -181,13 +179,13 @@ describe('app helpers', () => {
|
||||||
await copyDataDir(appConfig.id);
|
await copyDataDir(appConfig.id);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
expect(await pathExists(`${rootFolderHost}/apps/${appConfig.id}/data`)).toBe(false);
|
expect(await pathExists(`${ROOT_FOLDER}/apps/${appConfig.id}/data`)).toBe(false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy data dir to app-data folder', async () => {
|
it('should copy data dir to app-data folder', async () => {
|
||||||
// arrange
|
// arrange
|
||||||
const appConfig = createAppConfig({});
|
const appConfig = createAppConfig({});
|
||||||
const dataDir = `${rootFolderHost}/apps/${appConfig.id}/data`;
|
const dataDir = `${ROOT_FOLDER}/apps/${appConfig.id}/data`;
|
||||||
|
|
||||||
await fs.promises.mkdir(dataDir, { recursive: true });
|
await fs.promises.mkdir(dataDir, { recursive: true });
|
||||||
await fs.promises.writeFile(`${dataDir}/test.txt`, 'test');
|
await fs.promises.writeFile(`${dataDir}/test.txt`, 'test');
|
||||||
|
@ -196,14 +194,14 @@ describe('app helpers', () => {
|
||||||
await copyDataDir(appConfig.id);
|
await copyDataDir(appConfig.id);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const appDataDir = `${storagePath}/app-data/${appConfig.id}`;
|
const appDataDir = `${STORAGE_FOLDER}/app-data/${appConfig.id}`;
|
||||||
expect(await fs.promises.readFile(`${appDataDir}/data/test.txt`, 'utf8')).toBe('test');
|
expect(await fs.promises.readFile(`${appDataDir}/data/test.txt`, 'utf8')).toBe('test');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy folders recursively', async () => {
|
it('should copy folders recursively', async () => {
|
||||||
// arrange
|
// arrange
|
||||||
const appConfig = createAppConfig({});
|
const appConfig = createAppConfig({});
|
||||||
const dataDir = `${rootFolderHost}/apps/${appConfig.id}/data`;
|
const dataDir = `${ROOT_FOLDER}/apps/${appConfig.id}/data`;
|
||||||
|
|
||||||
await fs.promises.mkdir(dataDir, { recursive: true });
|
await fs.promises.mkdir(dataDir, { recursive: true });
|
||||||
|
|
||||||
|
@ -217,7 +215,7 @@ describe('app helpers', () => {
|
||||||
await copyDataDir(appConfig.id);
|
await copyDataDir(appConfig.id);
|
||||||
|
|
||||||
// assert
|
// assert
|
||||||
const appDataDir = `${storagePath}/app-data/${appConfig.id}`;
|
const appDataDir = `${STORAGE_FOLDER}/app-data/${appConfig.id}`;
|
||||||
expect(await fs.promises.readFile(`${appDataDir}/data/subdir/subsubdir/test.txt`, 'utf8')).toBe('test');
|
expect(await fs.promises.readFile(`${appDataDir}/data/subdir/subsubdir/test.txt`, 'utf8')).toBe('test');
|
||||||
expect(await fs.promises.readFile(`${appDataDir}/data/test.txt`, 'utf8')).toBe('test');
|
expect(await fs.promises.readFile(`${appDataDir}/data/test.txt`, 'utf8')).toBe('test');
|
||||||
});
|
});
|
||||||
|
@ -225,8 +223,8 @@ describe('app helpers', () => {
|
||||||
it('should replace the content of .template files with the content of the app.env file', async () => {
|
it('should replace the content of .template files with the content of the app.env file', async () => {
|
||||||
// arrange
|
// arrange
|
||||||
const appConfig = createAppConfig({});
|
const appConfig = createAppConfig({});
|
||||||
const dataDir = `${rootFolderHost}/apps/${appConfig.id}/data`;
|
const dataDir = `${ROOT_FOLDER}/apps/${appConfig.id}/data`;
|
||||||
const appDataDir = `${storagePath}/app-data/${appConfig.id}`;
|
const appDataDir = `${STORAGE_FOLDER}/app-data/${appConfig.id}`;
|
||||||
|
|
||||||
await fs.promises.mkdir(dataDir, { recursive: true });
|
await fs.promises.mkdir(dataDir, { recursive: true });
|
||||||
await fs.promises.mkdir(appDataDir, { recursive: true });
|
await fs.promises.mkdir(appDataDir, { recursive: true });
|
packages/worker/src/services/app/app.executors.ts (new file, 297 lines)
@ -0,0 +1,297 @@
|
||||||
|
/* eslint-disable no-await-in-loop */
|
||||||
|
/* eslint-disable no-restricted-syntax */
|
||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import pg from 'pg';
|
||||||
|
import { execAsync, pathExists } from '@runtipi/shared';
|
||||||
|
import { copyDataDir, generateEnvFile } from './app.helpers';
|
||||||
|
import { logger } from '@/lib/logger';
|
||||||
|
import { compose } from '@/lib/docker';
|
||||||
|
import { getEnv } from '@/lib/environment';
|
||||||
|
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||||
|
|
||||||
|
const getDbClient = async () => {
|
||||||
|
const { postgresHost, postgresDatabase, postgresUsername, postgresPassword, postgresPort } = getEnv();
|
||||||
|
|
||||||
|
const client = new pg.Client({
|
||||||
|
host: postgresHost,
|
||||||
|
database: postgresDatabase,
|
||||||
|
user: postgresUsername,
|
||||||
|
password: postgresPassword,
|
||||||
|
port: Number(postgresPort),
|
||||||
|
});
|
||||||
|
|
||||||
|
await client.connect();
|
||||||
|
|
||||||
|
return client;
|
||||||
|
};
|
||||||
|
|
||||||
|
export class AppExecutors {
|
||||||
|
private readonly logger;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.logger = logger;
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleAppError = (err: unknown) => {
|
||||||
|
if (err instanceof Error) {
|
||||||
|
this.logger.error(`An error occurred: ${err.message}`);
|
||||||
|
return { success: false, message: err.message };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: false, message: `An error occurred: ${err}` };
|
||||||
|
};
|
||||||
|
|
||||||
|
private getAppPaths = (appId: string) => {
|
||||||
|
const { appsRepoId } = getEnv();
|
||||||
|
|
||||||
|
const appDataDirPath = path.join(STORAGE_FOLDER, 'app-data', appId);
|
||||||
|
const appDirPath = path.join(ROOT_FOLDER, 'apps', appId);
|
||||||
|
const configJsonPath = path.join(appDirPath, 'config.json');
|
||||||
|
const repoPath = path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps', appId);
|
||||||
|
|
||||||
|
return { appDataDirPath, appDirPath, configJsonPath, repoPath };
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given an app id, ensures that the app folder exists in the apps folder
|
||||||
|
* If not, copies the app folder from the repo
|
||||||
|
* @param {string} appId - App id
|
||||||
|
*/
|
||||||
|
private ensureAppDir = async (appId: string) => {
|
||||||
|
const { appDirPath, repoPath } = this.getAppPaths(appId);
|
||||||
|
const dockerFilePath = path.join(ROOT_FOLDER, 'apps', appId, 'docker-compose.yml');
|
||||||
|
|
||||||
|
if (!(await pathExists(dockerFilePath))) {
|
||||||
|
// delete eventual app folder if exists
|
||||||
|
this.logger.info(`Deleting app ${appId} folder if exists`);
|
||||||
|
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||||
|
|
||||||
|
// Copy app folder from repo
|
||||||
|
this.logger.info(`Copying app ${appId} from repo ${getEnv().appsRepoId}`);
|
||||||
|
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Install an app from the repo
|
||||||
|
* @param {string} appId - The id of the app to install
|
||||||
|
* @param {Record<string, unknown>} config - The config of the app
|
||||||
|
*/
|
||||||
|
public installApp = async (appId: string, config: Record<string, unknown>) => {
|
||||||
|
try {
|
||||||
|
if (process.getuid && process.getgid) {
|
||||||
|
this.logger.info(`Installing app ${appId} as User ID: ${process.getuid()}, Group ID: ${process.getgid()}`);
|
||||||
|
} else {
|
||||||
|
this.logger.info(`Installing app ${appId}. No User ID or Group ID found.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const { appsRepoId } = getEnv();
|
||||||
|
|
||||||
|
const { appDirPath, repoPath, appDataDirPath } = this.getAppPaths(appId);
|
||||||
|
|
||||||
|
// Check if app exists in repo
|
||||||
|
const apps = await fs.promises.readdir(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps'));
|
||||||
|
|
||||||
|
if (!apps.includes(appId)) {
|
||||||
|
this.logger.error(`App ${appId} not found in repo ${appsRepoId}`);
|
||||||
|
return { success: false, message: `App ${appId} not found in repo ${appsRepoId}` };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete app folder if exists
|
||||||
|
this.logger.info(`Deleting folder ${appDirPath} if exists`);
|
||||||
|
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||||
|
|
||||||
|
// Create app folder
|
||||||
|
this.logger.info(`Creating folder ${appDirPath}`);
|
||||||
|
await fs.promises.mkdir(appDirPath, { recursive: true });
|
||||||
|
|
||||||
|
// Copy app folder from repo
|
||||||
|
this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
|
||||||
|
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||||
|
|
||||||
|
// Create app-data folder
||||||
|
this.logger.info(`Creating folder ${appDataDirPath}`);
|
||||||
|
await fs.promises.mkdir(appDataDirPath, { recursive: true });
|
||||||
|
|
||||||
|
// Create app.env file
|
||||||
|
this.logger.info(`Creating app.env file for app ${appId}`);
|
||||||
|
await generateEnvFile(appId, config);
|
||||||
|
|
||||||
|
// Copy data dir
|
||||||
|
this.logger.info(`Copying data dir for app ${appId}`);
|
||||||
|
if (!(await pathExists(`${appDataDirPath}/data`))) {
|
||||||
|
await copyDataDir(appId);
|
||||||
|
}
|
||||||
|
|
||||||
|
await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
|
||||||
|
this.logger.error(`Error setting permissions for app ${appId}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// run docker-compose up
|
||||||
|
this.logger.info(`Running docker-compose up for app ${appId}`);
|
||||||
|
await compose(appId, 'up -d');
|
||||||
|
|
||||||
|
this.logger.info(`Docker-compose up for app ${appId} finished`);
|
||||||
|
|
||||||
|
return { success: true, message: `App ${appId} installed successfully` };
|
||||||
|
} catch (err) {
|
||||||
|
return this.handleAppError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stops an app
|
||||||
|
* @param {string} appId - The id of the app to stop
|
||||||
|
* @param {Record<string, unknown>} config - The config of the app
|
||||||
|
*/
|
||||||
|
public stopApp = async (appId: string, config: Record<string, unknown>, skipEnvGeneration = false) => {
|
||||||
|
try {
|
||||||
|
this.logger.info(`Stopping app ${appId}`);
|
||||||
|
|
||||||
|
await this.ensureAppDir(appId);
|
||||||
|
|
||||||
|
if (!skipEnvGeneration) {
|
||||||
|
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||||
|
await generateEnvFile(appId, config);
|
||||||
|
}
|
||||||
|
await compose(appId, 'rm --force --stop');
|
||||||
|
|
||||||
|
this.logger.info(`App ${appId} stopped`);
|
||||||
|
return { success: true, message: `App ${appId} stopped successfully` };
|
||||||
|
} catch (err) {
|
||||||
|
return this.handleAppError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
public startApp = async (appId: string, config: Record<string, unknown>, skipEnvGeneration = false) => {
|
||||||
|
try {
|
||||||
|
const { appDataDirPath } = this.getAppPaths(appId);
|
||||||
|
|
||||||
|
this.logger.info(`Starting app ${appId}`);
|
||||||
|
|
||||||
|
await this.ensureAppDir(appId);
|
||||||
|
|
||||||
|
if (!skipEnvGeneration) {
|
||||||
|
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||||
|
await generateEnvFile(appId, config);
|
||||||
|
}
|
||||||
|
|
||||||
|
await compose(appId, 'up --detach --force-recreate --remove-orphans --pull always');
|
||||||
|
|
||||||
|
this.logger.info(`App ${appId} started`);
|
||||||
|
|
||||||
|
this.logger.info(`Setting permissions for app ${appId}`);
|
||||||
|
await execAsync(`chmod -R a+rwx ${path.join(appDataDirPath)}`).catch(() => {
|
||||||
|
this.logger.error(`Error setting permissions for app ${appId}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
return { success: true, message: `App ${appId} started successfully` };
|
||||||
|
} catch (err) {
|
||||||
|
return this.handleAppError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
public uninstallApp = async (appId: string, config: Record<string, unknown>) => {
|
||||||
|
try {
|
||||||
|
const { appDirPath, appDataDirPath } = this.getAppPaths(appId);
|
||||||
|
this.logger.info(`Uninstalling app ${appId}`);
|
||||||
|
|
||||||
|
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||||
|
await this.ensureAppDir(appId);
|
||||||
|
await generateEnvFile(appId, config);
|
||||||
|
try {
|
||||||
|
await compose(appId, 'down --remove-orphans --volumes --rmi all');
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Error && err.message.includes('conflict')) {
|
||||||
|
this.logger.warn(`Could not fully uninstall app ${appId}. Some images are in use by other apps. Consider cleaning unused images with 'docker system prune -a'`);
||||||
|
} else {
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.info(`Deleting folder ${appDirPath}`);
|
||||||
|
await fs.promises.rm(appDirPath, { recursive: true, force: true }).catch((err) => {
|
||||||
|
this.logger.error(`Error deleting folder ${appDirPath}: ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.info(`Deleting folder ${appDataDirPath}`);
|
||||||
|
await fs.promises.rm(appDataDirPath, { recursive: true, force: true }).catch((err) => {
|
||||||
|
this.logger.error(`Error deleting folder ${appDataDirPath}: ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.info(`App ${appId} uninstalled`);
|
||||||
|
return { success: true, message: `App ${appId} uninstalled successfully` };
|
||||||
|
} catch (err) {
|
||||||
|
return this.handleAppError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
public updateApp = async (appId: string, config: Record<string, unknown>) => {
|
||||||
|
try {
|
||||||
|
const { appDirPath, repoPath } = this.getAppPaths(appId);
|
||||||
|
this.logger.info(`Updating app ${appId}`);
|
||||||
|
await this.ensureAppDir(appId);
|
||||||
|
await generateEnvFile(appId, config);
|
||||||
|
|
||||||
|
await compose(appId, 'up --detach --force-recreate --remove-orphans');
|
||||||
|
await compose(appId, 'down --rmi all --remove-orphans');
|
||||||
|
|
||||||
|
this.logger.info(`Deleting folder ${appDirPath}`);
|
||||||
|
await fs.promises.rm(appDirPath, { recursive: true, force: true });
|
||||||
|
|
||||||
|
this.logger.info(`Copying folder ${repoPath} to ${appDirPath}`);
|
||||||
|
await fs.promises.cp(repoPath, appDirPath, { recursive: true });
|
||||||
|
|
||||||
|
await compose(appId, 'pull');
|
||||||
|
|
||||||
|
return { success: true, message: `App ${appId} updated successfully` };
|
||||||
|
} catch (err) {
|
||||||
|
return this.handleAppError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
public regenerateAppEnv = async (appId: string, config: Record<string, unknown>) => {
|
||||||
|
try {
|
||||||
|
this.logger.info(`Regenerating app.env file for app ${appId}`);
|
||||||
|
await this.ensureAppDir(appId);
|
||||||
|
await generateEnvFile(appId, config);
|
||||||
|
return { success: true, message: `App ${appId} env file regenerated successfully` };
|
||||||
|
} catch (err) {
|
||||||
|
return this.handleAppError(err);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start all apps with status running
|
||||||
|
*/
|
||||||
|
public startAllApps = async () => {
|
||||||
|
const client = await getDbClient();
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get all apps with status running
|
||||||
|
const { rows } = await client.query(`SELECT * FROM app WHERE status = 'running'`);
|
||||||
|
|
||||||
|
// Update all apps whose status is neither running, stopped nor missing to stopped
||||||
|
await client.query(`UPDATE app SET status = 'stopped' WHERE status != 'stopped' AND status != 'running' AND status != 'missing'`);
|
||||||
|
|
||||||
|
// Start all apps
|
||||||
|
for (const row of rows) {
|
||||||
|
const { id, config } = row;
|
||||||
|
|
||||||
|
const { success } = await this.startApp(id, config);
|
||||||
|
|
||||||
|
if (!success) {
|
||||||
|
this.logger.error(`Error starting app ${id}`);
|
||||||
|
await client.query(`UPDATE app SET status = 'stopped' WHERE id = '${id}'`);
|
||||||
|
} else {
|
||||||
|
await client.query(`UPDATE app SET status = 'running' WHERE id = '${id}'`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
this.logger.error(`Error starting apps: ${err}`);
|
||||||
|
} finally {
|
||||||
|
await client.end();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
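`startAllApps` above interpolates the app id directly into its UPDATE statements. Since `pg` supports parameterized queries, the same updates could be written with placeholders so the driver handles escaping; the helper below is only a sketch (same table and columns as above, helper name `setAppStatus` is hypothetical):

// Illustrative: the status updates expressed with pg placeholders.
import pg from 'pg';

const setAppStatus = async (client: pg.Client, id: string, status: 'running' | 'stopped') => {
  await client.query('UPDATE app SET status = $1 WHERE id = $2', [status, id]);
};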
|
|
@ -1,14 +1,10 @@
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { appInfoSchema, envMapToString, envStringToMap } from '@runtipi/shared';
|
import { appInfoSchema, envMapToString, envStringToMap, execAsync, pathExists } from '@runtipi/shared';
|
||||||
import { exec } from 'child_process';
|
|
||||||
import { promisify } from 'util';
|
|
||||||
import { getEnv } from '@/utils/environment/environment';
|
|
||||||
import { generateVapidKeys, getAppEnvMap } from './env.helpers';
|
import { generateVapidKeys, getAppEnvMap } from './env.helpers';
|
||||||
import { pathExists } from '@/utils/fs-helpers';
|
import { getEnv } from '@/lib/environment';
|
||||||
|
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';
|
||||||
const execAsync = promisify(exec);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function generates a random string of the provided length by using the SHA-256 hash algorithm.
|
* This function generates a random string of the provided length by using the SHA-256 hash algorithm.
|
||||||
|
@ -20,7 +16,7 @@ const execAsync = promisify(exec);
|
||||||
*/
|
*/
|
||||||
const getEntropy = async (name: string, length: number) => {
|
const getEntropy = async (name: string, length: number) => {
|
||||||
const hash = crypto.createHash('sha256');
|
const hash = crypto.createHash('sha256');
|
||||||
const seed = await fs.promises.readFile(path.join(getEnv().rootFolderHost, 'state', 'seed'));
|
const seed = await fs.promises.readFile(path.join(ROOT_FOLDER, 'state', 'seed'));
|
||||||
|
|
||||||
hash.update(name + seed.toString());
|
hash.update(name + seed.toString());
|
||||||
return hash.digest('hex').substring(0, length);
|
return hash.digest('hex').substring(0, length);
|
||||||
|
@ -39,16 +35,16 @@ const getEntropy = async (name: string, length: number) => {
|
||||||
* @throws Will throw an error if the app has an invalid config.json file or if a required variable is missing.
|
* @throws Will throw an error if the app has an invalid config.json file or if a required variable is missing.
|
||||||
*/
|
*/
|
||||||
export const generateEnvFile = async (appId: string, config: Record<string, unknown>) => {
|
export const generateEnvFile = async (appId: string, config: Record<string, unknown>) => {
|
||||||
const { rootFolderHost, storagePath, internalIp } = getEnv();
|
const { internalIp, storagePath, rootFolderHost } = getEnv();
|
||||||
|
|
||||||
const configFile = await fs.promises.readFile(path.join(rootFolderHost, 'apps', appId, 'config.json'));
|
const configFile = await fs.promises.readFile(path.join(ROOT_FOLDER, 'apps', appId, 'config.json'));
|
||||||
const parsedConfig = appInfoSchema.safeParse(JSON.parse(configFile.toString()));
|
const parsedConfig = appInfoSchema.safeParse(JSON.parse(configFile.toString()));
|
||||||
|
|
||||||
if (!parsedConfig.success) {
|
if (!parsedConfig.success) {
|
||||||
throw new Error(`App ${appId} has invalid config.json file`);
|
throw new Error(`App ${appId} has invalid config.json file`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const baseEnvFile = await fs.promises.readFile(path.join(rootFolderHost, '.env'));
|
const baseEnvFile = await fs.promises.readFile(path.join(ROOT_FOLDER, '.env'));
|
||||||
const envMap = envStringToMap(baseEnvFile.toString());
|
const envMap = envStringToMap(baseEnvFile.toString());
|
||||||
|
|
||||||
// Default always present env variables
|
// Default always present env variables
|
||||||
|
@ -100,15 +96,16 @@ export const generateEnvFile = async (appId: string, config: Record<string, unkn
|
||||||
} else {
|
} else {
|
||||||
envMap.set('APP_DOMAIN', `${internalIp}:${parsedConfig.data.port}`);
|
envMap.set('APP_DOMAIN', `${internalIp}:${parsedConfig.data.port}`);
|
||||||
envMap.set('APP_HOST', internalIp);
|
envMap.set('APP_HOST', internalIp);
|
||||||
|
envMap.set('APP_PROTOCOL', 'http');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create app-data folder if it doesn't exist
|
// Create app-data folder if it doesn't exist
|
||||||
const appDataDirectoryExists = await fs.promises.stat(path.join(storagePath, 'app-data', appId)).catch(() => false);
|
const appDataDirectoryExists = await fs.promises.stat(path.join(STORAGE_FOLDER, 'app-data', appId)).catch(() => false);
|
||||||
if (!appDataDirectoryExists) {
|
if (!appDataDirectoryExists) {
|
||||||
await fs.promises.mkdir(path.join(storagePath, 'app-data', appId), { recursive: true });
|
await fs.promises.mkdir(path.join(STORAGE_FOLDER, 'app-data', appId), { recursive: true });
|
||||||
}
|
}
|
||||||
|
|
||||||
await fs.promises.writeFile(path.join(storagePath, 'app-data', appId, 'app.env'), envMapToString(envMap));
|
await fs.promises.writeFile(path.join(STORAGE_FOLDER, 'app-data', appId, 'app.env'), envMapToString(envMap));
|
||||||
};
|
};
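For a non-exposed app, the `else` branch above results in three entries in `app.env`. The values below are purely illustrative (hypothetical internal IP and port), shown only to make the mapping concrete:

// Hypothetical example: internalIp = '192.168.2.10', app port = 8096.
envMap.set('APP_DOMAIN', '192.168.2.10:8096');
envMap.set('APP_HOST', '192.168.2.10');
envMap.set('APP_PROTOCOL', 'http');
// Serialized by envMapToString into app.env as:
// APP_DOMAIN=192.168.2.10:8096
// APP_HOST=192.168.2.10
// APP_PROTOCOL=http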
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -135,40 +132,38 @@ const renderTemplate = (template: string, envMap: Map<string, string>) => {
|
||||||
* @param {string} id - The id of the app.
|
* @param {string} id - The id of the app.
|
||||||
*/
|
*/
|
||||||
export const copyDataDir = async (id: string) => {
|
export const copyDataDir = async (id: string) => {
|
||||||
const { rootFolderHost, storagePath } = getEnv();
|
|
||||||
|
|
||||||
const envMap = await getAppEnvMap(id);
|
const envMap = await getAppEnvMap(id);
|
||||||
|
|
||||||
// return if app does not have a data directory
|
// return if app does not have a data directory
|
||||||
if (!(await pathExists(`${rootFolderHost}/apps/${id}/data`))) {
|
if (!(await pathExists(`${ROOT_FOLDER}/apps/${id}/data`))) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create app-data folder if it doesn't exist
|
// Create app-data folder if it doesn't exist
|
||||||
if (!(await pathExists(`${storagePath}/app-data/${id}/data`))) {
|
if (!(await pathExists(`${STORAGE_FOLDER}/app-data/${id}/data`))) {
|
||||||
await fs.promises.mkdir(`${storagePath}/app-data/${id}/data`, { recursive: true });
|
await fs.promises.mkdir(`${STORAGE_FOLDER}/app-data/${id}/data`, { recursive: true });
|
||||||
}
|
}
|
||||||
|
|
||||||
const dataDir = await fs.promises.readdir(`${rootFolderHost}/apps/${id}/data`);
|
const dataDir = await fs.promises.readdir(`${ROOT_FOLDER}/apps/${id}/data`);
|
||||||
|
|
||||||
const processFile = async (file: string) => {
|
const processFile = async (file: string) => {
|
||||||
if (file.endsWith('.template')) {
|
if (file.endsWith('.template')) {
|
||||||
const template = await fs.promises.readFile(`${rootFolderHost}/apps/${id}/data/${file}`, 'utf-8');
|
const template = await fs.promises.readFile(`${ROOT_FOLDER}/apps/${id}/data/${file}`, 'utf-8');
|
||||||
const renderedTemplate = renderTemplate(template, envMap);
|
const renderedTemplate = renderTemplate(template, envMap);
|
||||||
|
|
||||||
await fs.promises.writeFile(`${storagePath}/app-data/${id}/data/${file.replace('.template', '')}`, renderedTemplate);
|
await fs.promises.writeFile(`${STORAGE_FOLDER}/app-data/${id}/data/${file.replace('.template', '')}`, renderedTemplate);
|
||||||
} else {
|
} else {
|
||||||
await fs.promises.copyFile(`${rootFolderHost}/apps/${id}/data/${file}`, `${storagePath}/app-data/${id}/data/${file}`);
|
await fs.promises.copyFile(`${ROOT_FOLDER}/apps/${id}/data/${file}`, `${STORAGE_FOLDER}/app-data/${id}/data/${file}`);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const processDir = async (p: string) => {
|
const processDir = async (p: string) => {
|
||||||
await fs.promises.mkdir(`${storagePath}/app-data/${id}/data/${p}`, { recursive: true });
|
await fs.promises.mkdir(`${STORAGE_FOLDER}/app-data/${id}/data/${p}`, { recursive: true });
|
||||||
const files = await fs.promises.readdir(`${rootFolderHost}/apps/${id}/data/${p}`);
|
const files = await fs.promises.readdir(`${ROOT_FOLDER}/apps/${id}/data/${p}`);
|
||||||
|
|
||||||
await Promise.all(
|
await Promise.all(
|
||||||
files.map(async (file) => {
|
files.map(async (file) => {
|
||||||
const fullPath = `${rootFolderHost}/apps/${id}/data/${p}/${file}`;
|
const fullPath = `${ROOT_FOLDER}/apps/${id}/data/${p}/${file}`;
|
||||||
|
|
||||||
if ((await fs.promises.lstat(fullPath)).isDirectory()) {
|
if ((await fs.promises.lstat(fullPath)).isDirectory()) {
|
||||||
await processDir(`${p}/${file}`);
|
await processDir(`${p}/${file}`);
|
||||||
|
@ -181,7 +176,7 @@ export const copyDataDir = async (id: string) => {
|
||||||
|
|
||||||
await Promise.all(
|
await Promise.all(
|
||||||
dataDir.map(async (file) => {
|
dataDir.map(async (file) => {
|
||||||
const fullPath = `${rootFolderHost}/apps/${id}/data/${file}`;
|
const fullPath = `${ROOT_FOLDER}/apps/${id}/data/${file}`;
|
||||||
|
|
||||||
if ((await fs.promises.lstat(fullPath)).isDirectory()) {
|
if ((await fs.promises.lstat(fullPath)).isDirectory()) {
|
||||||
await processDir(file);
|
await processDir(file);
|
||||||
|
@ -192,7 +187,7 @@ export const copyDataDir = async (id: string) => {
|
||||||
);
|
);
|
||||||
|
|
||||||
// Remove any .gitkeep files from the app-data folder at any level
|
// Remove any .gitkeep files from the app-data folder at any level
|
||||||
if (await pathExists(`${storagePath}/app-data/${id}/data`)) {
|
if (await pathExists(`${STORAGE_FOLDER}/app-data/${id}/data`)) {
|
||||||
await execAsync(`find ${storagePath}/app-data/${id}/data -name .gitkeep -delete`).catch(() => {});
|
await execAsync(`find ${STORAGE_FOLDER}/app-data/${id}/data -name .gitkeep -delete`).catch(() => {});
|
||||||
}
|
}
|
||||||
};
|
};
|
|
@ -1,7 +1,7 @@
|
||||||
import webpush from 'web-push';
|
import webpush from 'web-push';
|
||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { getEnv } from '@/utils/environment/environment';
|
import { STORAGE_FOLDER } from '@/config/constants';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function reads the env file for the app with the provided id and returns a Map containing the key-value pairs of the environment variables.
|
* This function reads the env file for the app with the provided id and returns a Map containing the key-value pairs of the environment variables.
|
||||||
|
@ -11,7 +11,7 @@ import { getEnv } from '@/utils/environment/environment';
|
||||||
*/
|
*/
|
||||||
export const getAppEnvMap = async (appId: string) => {
|
export const getAppEnvMap = async (appId: string) => {
|
||||||
try {
|
try {
|
||||||
const envFile = await fs.promises.readFile(path.join(getEnv().storagePath, 'app-data', appId, 'app.env'));
|
const envFile = await fs.promises.readFile(path.join(STORAGE_FOLDER, 'app-data', appId, 'app.env'));
|
||||||
const envVars = envFile.toString().split('\n');
|
const envVars = envFile.toString().split('\n');
|
||||||
const envVarsMap = new Map<string, string>();
|
const envVarsMap = new Map<string, string>();
|
||||||
|
|
packages/worker/src/services/index.ts (new file, 3 lines)
@ -0,0 +1,3 @@
export { AppExecutors } from './app/app.executors';
export { RepoExecutors } from './repo/repo.executors';
export { SystemExecutors } from './system/system.executors';
@ -1,18 +1,13 @@
|
||||||
import { getEnv } from 'src/utils/environment/environment';
|
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { promisify } from 'util';
|
import { execAsync, pathExists } from '@runtipi/shared';
|
||||||
import { exec } from 'child_process';
|
import { getRepoHash, getRepoBaseUrlAndBranch } from './repo.helpers';
|
||||||
import { pathExists } from '@/utils/fs-helpers';
|
import { logger } from '@/lib/logger';
|
||||||
import { getRepoHash } from './repo.helpers';
|
|
||||||
import { fileLogger } from '@/utils/logger/file-logger';
|
|
||||||
|
|
||||||
const execAsync = promisify(exec);
|
|
||||||
|
|
||||||
export class RepoExecutors {
|
export class RepoExecutors {
|
||||||
private readonly logger;
|
private readonly logger;
|
||||||
|
|
||||||
constructor() {
|
constructor() {
|
||||||
this.logger = fileLogger;
|
this.logger = logger;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -31,23 +26,32 @@ export class RepoExecutors {
|
||||||
/**
|
/**
|
||||||
* Given a repo url, clone it to the repos folder if it doesn't exist
|
* Given a repo url, clone it to the repos folder if it doesn't exist
|
||||||
*
|
*
|
||||||
* @param {string} repoUrl
|
* @param {string} url
|
||||||
*/
|
*/
|
||||||
public cloneRepo = async (repoUrl: string) => {
|
public cloneRepo = async (url: string) => {
|
||||||
try {
|
try {
|
||||||
const { rootFolderHost } = getEnv();
|
// We may have a potential branch computed in the hash (see getRepoBaseUrlAndBranch)
|
||||||
|
// so we do it here before splitting the url into repoUrl and branch
|
||||||
const repoHash = getRepoHash(repoUrl);
|
const repoHash = getRepoHash(url);
|
||||||
const repoPath = path.join(rootFolderHost, 'repos', repoHash);
|
const repoPath = path.join('/app', 'repos', repoHash);
|
||||||
|
|
||||||
if (await pathExists(repoPath)) {
|
if (await pathExists(repoPath)) {
|
||||||
this.logger.info(`Repo ${repoUrl} already exists`);
|
this.logger.info(`Repo ${url} already exists`);
|
||||||
return { success: true, message: '' };
|
return { success: true, message: '' };
|
||||||
}
|
}
|
||||||
|
|
||||||
this.logger.info(`Cloning repo ${repoUrl} to ${repoPath}`);
|
const [repoUrl, branch] = getRepoBaseUrlAndBranch(url);
|
||||||
|
|
||||||
await execAsync(`git clone ${repoUrl} ${repoPath}`);
|
let cloneCommand;
|
||||||
|
if (branch) {
|
||||||
|
this.logger.info(`Cloning repo ${repoUrl} on branch ${branch} to ${repoPath}`);
|
||||||
|
cloneCommand = `git clone -b ${branch} ${repoUrl} ${repoPath}`;
|
||||||
|
} else {
|
||||||
|
this.logger.info(`Cloning repo ${repoUrl} to ${repoPath}`);
|
||||||
|
cloneCommand = `git clone ${repoUrl} ${repoPath}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
await execAsync(cloneCommand);
|
||||||
|
|
||||||
this.logger.info(`Cloned repo ${repoUrl} to ${repoPath}`);
|
this.logger.info(`Cloned repo ${repoUrl} to ${repoPath}`);
|
||||||
return { success: true, message: '' };
|
return { success: true, message: '' };
|
||||||
|
@ -63,10 +67,8 @@ export class RepoExecutors {
|
||||||
*/
|
*/
|
||||||
public pullRepo = async (repoUrl: string) => {
|
public pullRepo = async (repoUrl: string) => {
|
||||||
try {
|
try {
|
||||||
const { rootFolderHost } = getEnv();
|
|
||||||
|
|
||||||
const repoHash = getRepoHash(repoUrl);
|
const repoHash = getRepoHash(repoUrl);
|
||||||
const repoPath = path.join(rootFolderHost, 'repos', repoHash);
|
const repoPath = path.join('/app', 'repos', repoHash);
|
||||||
|
|
||||||
if (!(await pathExists(repoPath))) {
|
if (!(await pathExists(repoPath))) {
|
||||||
this.logger.info(`Repo ${repoUrl} does not exist`);
|
this.logger.info(`Repo ${repoUrl} does not exist`);
|
||||||
|
@ -96,11 +98,7 @@ export class RepoExecutors {
|
||||||
});
|
});
|
||||||
|
|
||||||
this.logger.info(`Executing: git -C ${repoPath} fetch origin && git -C ${repoPath} reset --hard origin/${currentBranch}`);
|
this.logger.info(`Executing: git -C ${repoPath} fetch origin && git -C ${repoPath} reset --hard origin/${currentBranch}`);
|
||||||
await execAsync(`git -C ${repoPath} fetch origin && git -C ${repoPath} reset --hard origin/${currentBranch}`).then(({ stderr }) => {
|
await execAsync(`git -C ${repoPath} fetch origin && git -C ${repoPath} reset --hard origin/${currentBranch}`);
|
||||||
if (stderr) {
|
|
||||||
this.logger.error(`stderr: ${stderr}`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
this.logger.info(`Pulled repo ${repoUrl} to ${repoPath}`);
|
this.logger.info(`Pulled repo ${repoUrl} to ${repoPath}`);
|
||||||
return { success: true, message: '' };
|
return { success: true, message: '' };
|
packages/worker/src/services/repo/repo.helpers.ts (new file, 27 lines)
@ -0,0 +1,27 @@
import crypto from 'crypto';

/**
 * Given a repo url, return a hash of it to be used as a folder name
 *
 * @param {string} repoUrl
 */
export const getRepoHash = (repoUrl: string) => {
  const hash = crypto.createHash('sha256');
  hash.update(repoUrl);
  return hash.digest('hex');
};

/**
 * Extracts the base URL and branch from a repository URL.
 * @param repoUrl The repository URL.
 * @returns An array containing the base URL and branch, or just the base URL if no branch is found.
 */
export const getRepoBaseUrlAndBranch = (repoUrl: string) => {
  const branchMatch = repoUrl.match(/^(.*)\/tree\/(.*)$/);
  if (branchMatch) {
    return [branchMatch[1], branchMatch[2]];
  }

  return [repoUrl, undefined];
};
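The two helpers are easiest to understand by example; the repository URLs below are illustrative only:

// A GitHub-style "/tree/<branch>" suffix is split off as the branch.
getRepoBaseUrlAndBranch('https://github.com/example/appstore/tree/dev');
// -> ['https://github.com/example/appstore', 'dev']

getRepoBaseUrlAndBranch('https://github.com/example/appstore');
// -> ['https://github.com/example/appstore', undefined]

// The hash of the full URL (branch suffix included) becomes the folder name under repos/.
getRepoHash('https://github.com/example/appstore/tree/dev'); // 64-character hex string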
packages/worker/src/services/system/system.executors.ts (new file, 60 lines)
@ -0,0 +1,60 @@
import fs from 'fs';
import path from 'path';
import si from 'systeminformation';
import { logger } from '@/lib/logger';
import { ROOT_FOLDER } from '@/config/constants';

export class SystemExecutors {
  private readonly logger;

  constructor() {
    this.logger = logger;
  }

  private handleSystemError = (err: unknown) => {
    if (err instanceof Error) {
      this.logger.error(`An error occurred: ${err.message}`);
      return { success: false, message: err.message };
    }
    this.logger.error(`An error occurred: ${err}`);

    return { success: false, message: `An error occurred: ${err}` };
  };

  private getSystemLoad = async () => {
    const { currentLoad } = await si.currentLoad();

    const memResult = { total: 0, used: 0, available: 0 };

    try {
      const memInfo = await fs.promises.readFile('/host/proc/meminfo');

      memResult.total = Number(memInfo.toString().match(/MemTotal:\s+(\d+)/)?.[1] ?? 0) * 1024;
      memResult.available = Number(memInfo.toString().match(/MemAvailable:\s+(\d+)/)?.[1] ?? 0) * 1024;
      memResult.used = memResult.total - memResult.available;
    } catch (e) {
      this.logger.error(`Unable to read /host/proc/meminfo: ${e}`);
    }

    const [disk0] = await si.fsSize();

    return {
      cpu: { load: currentLoad },
      memory: memResult,
      disk: { total: disk0?.size, used: disk0?.used, available: disk0?.available },
    };
  };

  public systemInfo = async () => {
    try {
      const systemLoad = await this.getSystemLoad();

      await fs.promises.writeFile(path.join(ROOT_FOLDER, 'state', 'system-info.json'), JSON.stringify(systemLoad, null, 2));
      await fs.promises.chmod(path.join(ROOT_FOLDER, 'state', 'system-info.json'), 0o777);

      return { success: true, message: '' };
    } catch (e) {
      return this.handleSystemError(e);
    }
  };
}
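On the consuming side, any process can read the file written by `systemInfo`. The reader below is a minimal sketch matching the shape returned by `getSystemLoad`; the `SystemInfo` type and `readSystemInfo` name are assumptions for illustration, while the path reuses `ROOT_FOLDER` as above:

// Illustrative reader for state/system-info.json as written by systemInfo().
type SystemInfo = {
  cpu: { load: number };
  memory: { total: number; used: number; available: number };
  disk: { total?: number; used?: number; available?: number };
};

const readSystemInfo = async (): Promise<SystemInfo> => {
  const raw = await fs.promises.readFile(path.join(ROOT_FOLDER, 'state', 'system-info.json'), 'utf-8');
  return JSON.parse(raw) as SystemInfo;
};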
@ -1,21 +1,13 @@
|
||||||
import { eventSchema } from '@runtipi/shared';
|
import { eventSchema } from '@runtipi/shared';
|
||||||
import { Worker } from 'bullmq';
|
import { Worker } from 'bullmq';
|
||||||
import { exec } from 'child_process';
|
import { AppExecutors, RepoExecutors, SystemExecutors } from '@/services';
|
||||||
import { promisify } from 'util';
|
import { logger } from '@/lib/logger';
|
||||||
import { AppExecutors, RepoExecutors, SystemExecutors } from '@/executors';
|
import { getEnv } from '@/lib/environment';
|
||||||
import { getEnv } from '@/utils/environment/environment';
|
|
||||||
import { getUserIds } from '@/utils/environment/user';
|
|
||||||
import { fileLogger } from '@/utils/logger/file-logger';
|
|
||||||
|
|
||||||
const execAsync = promisify(exec);
|
|
||||||
|
|
||||||
const runCommand = async (jobData: unknown) => {
|
const runCommand = async (jobData: unknown) => {
|
||||||
const { gid, uid } = getUserIds();
|
|
||||||
fileLogger.info(`Running command with uid ${uid} and gid ${gid}`);
|
|
||||||
|
|
||||||
const { installApp, startApp, stopApp, uninstallApp, updateApp, regenerateAppEnv } = new AppExecutors();
|
const { installApp, startApp, stopApp, uninstallApp, updateApp, regenerateAppEnv } = new AppExecutors();
|
||||||
const { cloneRepo, pullRepo } = new RepoExecutors();
|
const { cloneRepo, pullRepo } = new RepoExecutors();
|
||||||
const { systemInfo, restart, update } = new SystemExecutors();
|
const { systemInfo } = new SystemExecutors();
|
||||||
|
|
||||||
const event = eventSchema.safeParse(jobData);
|
const event = eventSchema.safeParse(jobData);
|
||||||
|
|
||||||
|
@ -34,11 +26,11 @@ const runCommand = async (jobData: unknown) => {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.command === 'stop') {
|
if (data.command === 'stop') {
|
||||||
({ success, message } = await stopApp(data.appid, data.form));
|
({ success, message } = await stopApp(data.appid, data.form, data.skipEnv));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.command === 'start') {
|
if (data.command === 'start') {
|
||||||
({ success, message } = await startApp(data.appid, data.form));
|
({ success, message } = await startApp(data.appid, data.form, data.skipEnv));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.command === 'uninstall') {
|
if (data.command === 'uninstall') {
|
||||||
|
@@ -64,38 +56,11 @@ const runCommand = async (jobData: unknown) => {
     if (data.command === 'system_info') {
       ({ success, message } = await systemInfo());
     }
-
-    if (data.command === 'restart') {
-      ({ success, message } = await restart());
-    }
-
-    if (data.command === 'update') {
-      ({ success, message } = await update(data.version));
-    }
   }
 
   return { success, message };
 };
 
-export const killOtherWorkers = async () => {
-  const { stdout } = await execAsync('ps aux | grep "index.js watch" | grep -v grep | awk \'{print $2}\'');
-  const { stdout: stdoutInherit } = await execAsync('ps aux | grep "runtipi-cli watch" | grep -v grep | awk \'{print $2}\'');
-
-  fileLogger.info(`Killing other workers with pids ${stdout} and ${stdoutInherit}`);
-
-  const pids = stdout.split('\n').filter((pid: string) => pid !== '');
-  const pidsInherit = stdoutInherit.split('\n').filter((pid: string) => pid !== '');
-
-  pids.concat(pidsInherit).forEach((pid) => {
-    fileLogger.info(`Killing worker with pid ${pid}`);
-    try {
-      process.kill(Number(pid));
-    } catch (e) {
-      fileLogger.error(`Error killing worker with pid ${pid}: ${e}`);
-    }
-  });
-};
-
 /**
  * Start the worker for the events queue
  */
@@ -103,27 +68,27 @@ export const startWorker = async () => {
   const worker = new Worker(
     'events',
     async (job) => {
-      fileLogger.info(`Processing job ${job.id} with data ${JSON.stringify(job.data)}`);
+      logger.info(`Processing job ${job.id} with data ${JSON.stringify(job.data)}`);
       const { message, success } = await runCommand(job.data);
 
       return { success, stdout: message };
     },
-    { connection: { host: '127.0.0.1', port: 6379, password: getEnv().redisPassword, connectTimeout: 60000 } },
+    { connection: { host: getEnv().redisHost, port: 6379, password: getEnv().redisPassword, connectTimeout: 60000 }, removeOnComplete: { count: 200 }, removeOnFail: { count: 500 } },
   );
 
   worker.on('ready', () => {
-    fileLogger.info('Worker is ready');
+    logger.info('Worker is ready');
   });
 
   worker.on('completed', (job) => {
-    fileLogger.info(`Job ${job.id} completed with result: ${JSON.stringify(job.returnvalue)}`);
+    logger.info(`Job ${job.id} completed with result:`, JSON.stringify(job.returnvalue));
   });
 
   worker.on('failed', (job) => {
-    fileLogger.error(`Job ${job?.id} failed with reason ${job?.failedReason}`);
+    logger.error(`Job ${job?.id} failed with reason ${job?.failedReason}`);
   });
 
   worker.on('error', async (e) => {
-    fileLogger.error(`Worker error: ${e}`);
+    logger.debug(`Worker error: ${e}`);
   });
 };
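For reference, the worker above consumes jobs from the 'events' queue; a minimal sketch of how a producer could enqueue a job with bullmq's Queue. The connection values and the payload are illustrative, and the payload would have to satisfy eventSchema for runCommand() to accept it:

import { Queue } from 'bullmq';

// Illustrative connection settings; the worker reads its host and password from getEnv().
const eventsQueue = new Queue('events', {
  connection: { host: '127.0.0.1', port: 6379, password: 'redis-password' },
});

const enqueueSystemInfo = async () => {
  // Hypothetical payload shape; it must parse against eventSchema on the worker side.
  const job = await eventsQueue.add('system_info', { command: 'system_info' });
  console.log(`Enqueued job ${job.id}`);
};

enqueueSystemInfo().catch(console.error);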
packages/worker/tests/apps.factory.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { faker } from '@faker-js/faker';
import fs from 'fs';
import { APP_CATEGORIES, AppInfo, appInfoSchema } from '@runtipi/shared';
import { ROOT_FOLDER, STORAGE_FOLDER } from '@/config/constants';

export const createAppConfig = (props?: Partial<AppInfo>, isInstalled = true) => {
  const appInfo = appInfoSchema.parse({
    id: faker.string.alphanumeric(32),
    available: true,
    port: faker.number.int({ min: 30, max: 65535 }),
    name: faker.string.alphanumeric(32),
    description: faker.string.alphanumeric(32),
    tipi_version: 1,
    short_desc: faker.string.alphanumeric(32),
    author: faker.string.alphanumeric(32),
    source: faker.internet.url(),
    categories: [APP_CATEGORIES.AUTOMATION],
    ...props,
  });

  const mockFiles: Record<string, string | string[]> = {};
  mockFiles[`${ROOT_FOLDER}/.env`] = 'TEST=test';
  mockFiles[`${ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/config.json`] = JSON.stringify(appInfoSchema.parse(appInfo));
  mockFiles[`${ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/docker-compose.yml`] = 'compose';
  mockFiles[`${ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/metadata/description.md`] = 'md desc';

  if (isInstalled) {
    mockFiles[`${ROOT_FOLDER}/apps/${appInfo.id}/config.json`] = JSON.stringify(appInfoSchema.parse(appInfo));
    mockFiles[`${ROOT_FOLDER}/apps/${appInfo.id}/docker-compose.yml`] = 'compose';
    mockFiles[`${ROOT_FOLDER}/apps/${appInfo.id}/metadata/description.md`] = 'md desc';
    mockFiles[`${STORAGE_FOLDER}/app-data/${appInfo.id}/data/test.txt`] = 'data';
  }

  // @ts-expect-error - custom mock method
  fs.__applyMockFiles(mockFiles);

  return appInfo;
};
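A sketch of how this factory might be used from a Vitest test, assuming the fs mock and setup file below are active and that the '@/tests' alias resolves; the test body and assertions are illustrative only:

import fs from 'fs';
import { describe, it, expect } from 'vitest';
import { createAppConfig } from '@/tests/apps.factory';
import { ROOT_FOLDER } from '@/config/constants';

describe('createAppConfig', () => {
  it('seeds the mocked repo with the generated app config', async () => {
    const appInfo = createAppConfig();

    // The factory wrote this file into the in-memory filesystem from tests/mocks/fs.ts.
    const raw = await fs.promises.readFile(`${ROOT_FOLDER}/repos/repo-id/apps/${appInfo.id}/config.json`, 'utf-8');
    expect(JSON.parse(raw).id).toBe(appInfo.id);
  });
});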
packages/worker/tests/mocks/fs.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import { fs, vol } from 'memfs';

const copyFolderRecursiveSync = (src: string, dest: string) => {
  const exists = vol.existsSync(src);
  const stats = vol.statSync(src);
  const isDirectory = exists && stats.isDirectory();
  if (isDirectory) {
    vol.mkdirSync(dest, { recursive: true });
    vol.readdirSync(src).forEach((childItemName) => {
      copyFolderRecursiveSync(`${src}/${childItemName}`, `${dest}/${childItemName}`);
    });
  } else {
    vol.copyFileSync(src, dest);
  }
};

export const fsMock = {
  default: {
    ...fs,
    promises: {
      ...fs.promises,
      cp: copyFolderRecursiveSync,
    },
    copySync: (src: string, dest: string) => {
      copyFolderRecursiveSync(src, dest);
    },
    __resetAllMocks: () => {
      vol.reset();
    },
    __applyMockFiles: (newMockFiles: Record<string, string>) => {
      // Create folder tree
      vol.fromJSON(newMockFiles, 'utf8');
    },
    __createMockFiles: (newMockFiles: Record<string, string>) => {
      vol.reset();
      // Create folder tree
      vol.fromJSON(newMockFiles, 'utf8');
    },
    __printVol: () => console.log(vol.toTree()),
  },
};
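The mock above is built on memfs. As a standalone illustration of the memfs calls it relies on (the paths here are made up), vol.fromJSON seeds an in-memory tree and vol.toJSON dumps it back:

import { vol } from 'memfs';

// Seed two files; intermediate directories are created automatically.
vol.fromJSON(
  {
    '/runtipi/.env': 'TEST=test',
    '/runtipi/apps/demo/config.json': '{"id":"demo"}',
  },
  '/',
);

console.log(vol.existsSync('/runtipi/apps/demo/config.json')); // true
console.log(vol.toJSON()); // flat { path: content } map of the in-memory tree
vol.reset(); // back to an empty volume, as __resetAllMocks() does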
packages/worker/tests/vite.setup.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import fs from 'fs';
import path from 'path';
import { vi, beforeEach } from 'vitest';
import { getEnv } from '@/lib/environment';
import { ROOT_FOLDER } from '@/config/constants';

vi.mock('@runtipi/shared', async (importOriginal) => {
  const mod = (await importOriginal()) as object;

  return {
    ...mod,
    createLogger: vi.fn().mockReturnValue({
      info: vi.fn(),
      error: vi.fn(),
    }),
    FileLogger: vi.fn().mockImplementation(() => ({
      flush: vi.fn(),
      error: vi.fn(),
      info: vi.fn(),
      warn: vi.fn(),
      debug: vi.fn(),
    })),
  };
});

vi.mock('fs', async () => {
  const { fsMock } = await import('@/tests/mocks/fs');
  return {
    ...fsMock,
  };
});

beforeEach(async () => {
  // @ts-expect-error - custom mock method
  fs.__resetAllMocks();

  const { appsRepoId } = getEnv();

  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'state'), { recursive: true });
  await fs.promises.writeFile(path.join(ROOT_FOLDER, 'state', 'seed'), 'seed');
  await fs.promises.mkdir(path.join(ROOT_FOLDER, 'repos', appsRepoId, 'apps'), { recursive: true });
});
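This setup file only runs if it is registered in the package's Vitest configuration. A minimal sketch of such a config, assuming a vitest.config.ts in packages/worker with a '@' alias onto src/ and a '@/tests' alias onto tests/ (neither the file nor the aliases are shown in this diff):

import path from 'path';
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    environment: 'node',
    // Run the mocks and filesystem seeding from tests/vite.setup.ts before each test file.
    setupFiles: ['./tests/vite.setup.ts'],
  },
  resolve: {
    alias: [
      // Hypothetical aliases so '@/lib/...' and '@/tests/...' imports resolve in tests.
      { find: '@/tests', replacement: path.resolve(__dirname, './tests') },
      { find: '@', replacement: path.resolve(__dirname, './src') },
    ],
  },
});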
Some files were not shown because too many files have changed in this diff.