mirror of
https://github.com/ershisan99/coolify.git
synced 2025-12-18 12:33:06 +00:00
Compare commits
746 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
030cb124e5 | ||
|
|
fd363ec017 | ||
|
|
8b813fb07a | ||
|
|
40da3ff9fe | ||
|
|
2315192f4b | ||
|
|
0faa1540f4 | ||
|
|
00cab67e73 | ||
|
|
b92bc9eebb | ||
|
|
1905db16e8 | ||
|
|
3e9cf7285b | ||
|
|
6fdbc572fe | ||
|
|
3fd50ebb12 | ||
|
|
0eb7f4526e | ||
|
|
646d92757a | ||
|
|
51efa01b11 | ||
|
|
dc4a63ef92 | ||
|
|
1b717ac091 | ||
|
|
e93d97f2bc | ||
|
|
45c904e876 | ||
|
|
880865f1f2 | ||
|
|
8e42203b89 | ||
|
|
2bd91fa970 | ||
|
|
a3fd95020d | ||
|
|
e5b1ce4eef | ||
|
|
531973baab | ||
|
|
b6e6a1ccf1 | ||
|
|
1140afe2c9 | ||
|
|
f8f17832de | ||
|
|
caaf030517 | ||
|
|
106aee31bd | ||
|
|
48fa4ff245 | ||
|
|
d75d2880e5 | ||
|
|
ec907b0ce4 | ||
|
|
2cda0b22c2 | ||
|
|
a0076db42e | ||
|
|
a37cf49c2a | ||
|
|
c4833c3cc2 | ||
|
|
d03fbd9224 | ||
|
|
5998212b82 | ||
|
|
62ccab22d6 | ||
|
|
5ccea1cfcc | ||
|
|
8ccb1bd34c | ||
|
|
c1a48dcf1e | ||
|
|
11d74c0c1f | ||
|
|
8290ee856f | ||
|
|
08332c8321 | ||
|
|
046f738b7d | ||
|
|
07708155ac | ||
|
|
df5e23c7c2 | ||
|
|
41adc02801 | ||
|
|
72b650b086 | ||
|
|
06fe3f33c0 | ||
|
|
cbabf7fc51 | ||
|
|
6aeafda604 | ||
|
|
30d656698e | ||
|
|
94d1af01df | ||
|
|
af97d399b6 | ||
|
|
2f90fd1fe6 | ||
|
|
c05a140b0b | ||
|
|
cbfb9a3844 | ||
|
|
5a227f70c6 | ||
|
|
44a102443d | ||
|
|
cf7fdf198d | ||
|
|
68f2f4f978 | ||
|
|
029b623f08 | ||
|
|
fe3702847a | ||
|
|
e9b852a30e | ||
|
|
1d4e5df5a2 | ||
|
|
5e14b72fe4 | ||
|
|
8ebff72cde | ||
|
|
e16643c48c | ||
|
|
65c8f55ee6 | ||
|
|
fbc81ab3eb | ||
|
|
a4d56fd79a | ||
|
|
ce45cb8aca | ||
|
|
7f8428cd17 | ||
|
|
14d79031c1 | ||
|
|
b8aa7b6d08 | ||
|
|
397ca7f20e | ||
|
|
e10b76a46b | ||
|
|
b46566280d | ||
|
|
3ab6a231eb | ||
|
|
2bc2ae9b6e | ||
|
|
2b28f8bd8f | ||
|
|
dcdac29135 | ||
|
|
591ee29e0d | ||
|
|
625e71ab08 | ||
|
|
b0af54587b | ||
|
|
be3080df08 | ||
|
|
04685c9f9d | ||
|
|
1a83f2635f | ||
|
|
630aa45c87 | ||
|
|
0c3a381d1f | ||
|
|
ffac7c5c87 | ||
|
|
410800e81c | ||
|
|
9481beb61f | ||
|
|
141f2481a7 | ||
|
|
ea18f25adc | ||
|
|
9018184747 | ||
|
|
4fc2dd55f5 | ||
|
|
5ef9a282eb | ||
|
|
93a6518974 | ||
|
|
07aa285b27 | ||
|
|
bf01e9e29f | ||
|
|
d70672ba4b | ||
|
|
5eeb519ed6 | ||
|
|
5f047e4adf | ||
|
|
56b9a376bd | ||
|
|
0a1d31a188 | ||
|
|
64c9fb9a1b | ||
|
|
47aad15cd5 | ||
|
|
260a47a366 | ||
|
|
fd4bbe17f0 | ||
|
|
25ff637703 | ||
|
|
f571453696 | ||
|
|
5cd7533972 | ||
|
|
3a252509d0 | ||
|
|
2bd3802a6f | ||
|
|
ce2757f514 | ||
|
|
8419cdf604 | ||
|
|
907c2414ae | ||
|
|
f82207564f | ||
|
|
991a09838c | ||
|
|
25df4bfd85 | ||
|
|
d2f89d001b | ||
|
|
1971f227fd | ||
|
|
c1adffe260 | ||
|
|
e725887a55 | ||
|
|
5bf79b75b0 | ||
|
|
6926975e40 | ||
|
|
978a01c968 | ||
|
|
f421f5ee84 | ||
|
|
383831c7b8 | ||
|
|
41329facf7 | ||
|
|
7d3c644148 | ||
|
|
7fab9b5930 | ||
|
|
58763ef84c | ||
|
|
0e6abf172b | ||
|
|
9e681ece41 | ||
|
|
28f87a306d | ||
|
|
23e8833208 | ||
|
|
03962663c2 | ||
|
|
cc2ec55c4d | ||
|
|
ff2c38aa16 | ||
|
|
b5a9a2cea8 | ||
|
|
cd3f661f7e | ||
|
|
41bf6b5b86 | ||
|
|
a4e7c85184 | ||
|
|
19aca9ab35 | ||
|
|
08704c289a | ||
|
|
2224c22c6e | ||
|
|
b281889acd | ||
|
|
cfc50a27b0 | ||
|
|
ed5f21da6a | ||
|
|
78f3eb81dd | ||
|
|
6a833934ce | ||
|
|
45bf6f77d1 | ||
|
|
a1b3b7b687 | ||
|
|
7ebcad6abb | ||
|
|
fed6d2bf07 | ||
|
|
bea4943e9f | ||
|
|
1979e431b8 | ||
|
|
9bead1d6b4 | ||
|
|
56c4295e16 | ||
|
|
7c7b5a61e5 | ||
|
|
abaa13fda8 | ||
|
|
042bfeddbb | ||
|
|
f45ab067ce | ||
|
|
97a6f04aaa | ||
|
|
417c01d6e0 | ||
|
|
b2e7435d0f | ||
|
|
73c9cb1d51 | ||
|
|
41c5dd3b53 | ||
|
|
bb0c93dc2f | ||
|
|
7953c1df30 | ||
|
|
c3f4245164 | ||
|
|
369001febb | ||
|
|
7ec296be6b | ||
|
|
d2f5a58f3b | ||
|
|
f4315144af | ||
|
|
e92775887d | ||
|
|
a5f1b4b675 | ||
|
|
157e5fd7aa | ||
|
|
5e7e1c11c7 | ||
|
|
e8516bc831 | ||
|
|
e3f78a1cf9 | ||
|
|
3449e0f8fc | ||
|
|
66af12f9b5 | ||
|
|
13acf09dcc | ||
|
|
ce71dccbc1 | ||
|
|
d9ba1a0b5c | ||
|
|
0b709c93a8 | ||
|
|
1657e5a151 | ||
|
|
a165b21950 | ||
|
|
0d0715a340 | ||
|
|
76754ded79 | ||
|
|
4da27a46a2 | ||
|
|
039953588e | ||
|
|
b8b4f559db | ||
|
|
2b0df270df | ||
|
|
b96c1a23ec | ||
|
|
f779b3bb54 | ||
|
|
6462982d12 | ||
|
|
84b4cc5d54 | ||
|
|
1bd2ccbc16 | ||
|
|
3abe1610bf | ||
|
|
61716738ed | ||
|
|
4e819f6eba | ||
|
|
fedb38f2bc | ||
|
|
aae108032c | ||
|
|
020013683b | ||
|
|
70de2538e2 | ||
|
|
9f581c82a9 | ||
|
|
eb2e07afc5 | ||
|
|
9c47b8495c | ||
|
|
2f8d0ee60c | ||
|
|
5bf14f4639 | ||
|
|
9da08d600b | ||
|
|
4d47eab07c | ||
|
|
f2061c5c25 | ||
|
|
430fc66ed7 | ||
|
|
bcb84b8126 | ||
|
|
dd83e86bc3 | ||
|
|
3e8a8364dc | ||
|
|
be41c0dd02 | ||
|
|
a17b7a564e | ||
|
|
f3cdda29bc | ||
|
|
de37ee9f1c | ||
|
|
8212868b92 | ||
|
|
b44d8578d9 | ||
|
|
0358cf2de2 | ||
|
|
94da008a47 | ||
|
|
456b1b8074 | ||
|
|
78e6a7d1d3 | ||
|
|
76dc7ffb68 | ||
|
|
211aff7170 | ||
|
|
bcacefb841 | ||
|
|
4505ad37d8 | ||
|
|
18cf57f33c | ||
|
|
9f2f5b40c3 | ||
|
|
8a401f50cb | ||
|
|
51a5b3b602 | ||
|
|
68f9bca054 | ||
|
|
e9e92c6e9e | ||
|
|
008cfdba09 | ||
|
|
9973197fa5 | ||
|
|
ec3b94cf96 | ||
|
|
c4cb92c78d | ||
|
|
c390f82246 | ||
|
|
b4f98e24a1 | ||
|
|
e042c5cfde | ||
|
|
faeae8fd6c | ||
|
|
ae4942ba29 | ||
|
|
fd652bfce6 | ||
|
|
3d72167721 | ||
|
|
ba284bef9e | ||
|
|
d18bb9cc74 | ||
|
|
a7ed3e58db | ||
|
|
8405ebd28d | ||
|
|
352bb65125 | ||
|
|
fe2cc5a99a | ||
|
|
7a2f29f6a3 | ||
|
|
9a05bfa899 | ||
|
|
39fa64e20d | ||
|
|
3a835b420e | ||
|
|
82f7633c3a | ||
|
|
9fdac2741a | ||
|
|
8fb5260809 | ||
|
|
e08ec12d26 | ||
|
|
1202e00a21 | ||
|
|
4ba2205af4 | ||
|
|
09841ad4cb | ||
|
|
d2dcd0abc8 | ||
|
|
fe9d0503fb | ||
|
|
8e9e6607e5 | ||
|
|
e1efd9355f | ||
|
|
ca705bbf89 | ||
|
|
b70fe09d17 | ||
|
|
d7d570393f | ||
|
|
41ca265e5a | ||
|
|
03cde08d67 | ||
|
|
5684674bd7 | ||
|
|
4fe919f2ea | ||
|
|
c8c23c53ef | ||
|
|
b1c25e98d7 | ||
|
|
7ab5a4bfcf | ||
|
|
a3ee57995c | ||
|
|
32020fd336 | ||
|
|
f1313b6468 | ||
|
|
3ef093c7e6 | ||
|
|
f5dfaa81d3 | ||
|
|
fcf206a081 | ||
|
|
9790d2b613 | ||
|
|
c39cb42601 | ||
|
|
0ead17ab70 | ||
|
|
4a6062522e | ||
|
|
201fa82efc | ||
|
|
d28433ee64 | ||
|
|
cc348bf0f5 | ||
|
|
b023d65fcf | ||
|
|
bd15d85732 | ||
|
|
b4bbd22781 | ||
|
|
d4c972584a | ||
|
|
edef4bd4a0 | ||
|
|
448611039c | ||
|
|
305fab488e | ||
|
|
38f0546f05 | ||
|
|
8cb679711d | ||
|
|
4d11867500 | ||
|
|
8232a7468b | ||
|
|
03e7af12be | ||
|
|
39f2e28a11 | ||
|
|
53947d805b | ||
|
|
15f8e44237 | ||
|
|
5ce1bc1ec5 | ||
|
|
c36bd34a1a | ||
|
|
781d034484 | ||
|
|
e4f701b148 | ||
|
|
5160d0780e | ||
|
|
8cd561b8cc | ||
|
|
b8b57bc48b | ||
|
|
58406f055e | ||
|
|
b049297082 | ||
|
|
a284928352 | ||
|
|
fe787538e3 | ||
|
|
360fb5ea37 | ||
|
|
45af5cbef8 | ||
|
|
463dacbe59 | ||
|
|
13891110ce | ||
|
|
01e0fb70c9 | ||
|
|
c1c25d59c8 | ||
|
|
6ac54e17f4 | ||
|
|
a82805846f | ||
|
|
a53bda1436 | ||
|
|
6309074844 | ||
|
|
b80e0d15fb | ||
|
|
7a0d151467 | ||
|
|
c55505af6c | ||
|
|
a788b7bc13 | ||
|
|
5f27fc0770 | ||
|
|
b814c6e563 | ||
|
|
8f58b14629 | ||
|
|
269250ef3d | ||
|
|
a3241516cb | ||
|
|
943300509b | ||
|
|
92d1f5aa55 | ||
|
|
7a74ba1796 | ||
|
|
614eb923d8 | ||
|
|
066f5b25e0 | ||
|
|
18f7ab1b95 | ||
|
|
78293340cc | ||
|
|
c47457a17f | ||
|
|
d00629b627 | ||
|
|
ddfbda6f80 | ||
|
|
d910b21185 | ||
|
|
b60b832426 | ||
|
|
adfc976f41 | ||
|
|
1b43976ff0 | ||
|
|
321fb019eb | ||
|
|
f6858a68e0 | ||
|
|
741db1778b | ||
|
|
809f40dec9 | ||
|
|
f3b5de4697 | ||
|
|
fe17e2eaba | ||
|
|
22ef0b5d29 | ||
|
|
823279fb60 | ||
|
|
19f661706d | ||
|
|
986c5b7133 | ||
|
|
4e334d4fff | ||
|
|
dcf7f92aab | ||
|
|
f56361c0ca | ||
|
|
4946ca2d91 | ||
|
|
f6a91cb53c | ||
|
|
726fbbb52a | ||
|
|
29d2278579 | ||
|
|
72ceeff022 | ||
|
|
54d65ec011 | ||
|
|
96aef5c4a6 | ||
|
|
7b64166fb0 | ||
|
|
1f5908e0b8 | ||
|
|
a4562d18b6 | ||
|
|
875e232199 | ||
|
|
80f95a4674 | ||
|
|
17d56aa972 | ||
|
|
f4ba60cf8f | ||
|
|
0b8a648f13 | ||
|
|
2576a3af2c | ||
|
|
2e6c73fa3c | ||
|
|
b8d8ee4560 | ||
|
|
d9b74ada84 | ||
|
|
01b058151b | ||
|
|
989d952f35 | ||
|
|
908af3e024 | ||
|
|
819157fda1 | ||
|
|
5a4458e93f | ||
|
|
1fbd403f34 | ||
|
|
098e519c55 | ||
|
|
3ef4a242f9 | ||
|
|
ad3044dce1 | ||
|
|
e40541d831 | ||
|
|
2786e7dbaf | ||
|
|
196d681a63 | ||
|
|
d2353e3c35 | ||
|
|
2475031f88 | ||
|
|
cd15e68adc | ||
|
|
27431f779d | ||
|
|
b9b5a2faeb | ||
|
|
e471b11d3b | ||
|
|
a742a3d2e3 | ||
|
|
c615f6c07e | ||
|
|
a6ebfb08f7 | ||
|
|
2b0d162226 | ||
|
|
2c5f09a8bb | ||
|
|
ef073e586b | ||
|
|
82bfdb87e3 | ||
|
|
767e7b80cb | ||
|
|
8d26ea9063 | ||
|
|
1a7c4310d0 | ||
|
|
4e8fe79e2b | ||
|
|
a8c5551292 | ||
|
|
2bf73109b2 | ||
|
|
f0ab3750bd | ||
|
|
58a11e37fe | ||
|
|
927bf46304 | ||
|
|
6b89857697 | ||
|
|
b72e5ccef6 | ||
|
|
6617b7811b | ||
|
|
e1c1988db4 | ||
|
|
af99ea4678 | ||
|
|
a6d5316090 | ||
|
|
f5e7a84fa6 | ||
|
|
c013764b61 | ||
|
|
2320ab0dfc | ||
|
|
1281a0f7e4 | ||
|
|
d8350cd4ee | ||
|
|
e3b7c23ed9 | ||
|
|
eae1ea21d6 | ||
|
|
541aa76b64 | ||
|
|
7b8555d524 | ||
|
|
fdf998c181 | ||
|
|
3d6b343adc | ||
|
|
e338cecc14 | ||
|
|
e5537a33fb | ||
|
|
35384deb68 | ||
|
|
547ca60c2a | ||
|
|
376f6f7455 | ||
|
|
abe92dedff | ||
|
|
4b521ceedc | ||
|
|
6dfcb9e52b | ||
|
|
335e3216e2 | ||
|
|
5b22bb4818 | ||
|
|
0097004882 | ||
|
|
1bc9e4c2d3 | ||
|
|
36c7e1a3c3 | ||
|
|
c6b4d04e26 | ||
|
|
fa6cf068c7 | ||
|
|
7c273a3a48 | ||
|
|
3de2ea1523 | ||
|
|
c5c9f84503 | ||
|
|
16ea9a3e07 | ||
|
|
48f952c798 | ||
|
|
f78ea5de07 | ||
|
|
5adbd5e784 | ||
|
|
5b2afa79d7 | ||
|
|
dc4e6d02b7 | ||
|
|
8ae61c8f78 | ||
|
|
684b8e0914 | ||
|
|
7c3314abae | ||
|
|
ab9f8ff356 | ||
|
|
892d8cd5c1 | ||
|
|
8b8b45778d | ||
|
|
6655fb182c | ||
|
|
0926d40247 | ||
|
|
ddc4d36688 | ||
|
|
53e1f22eb1 | ||
|
|
3d2a34737b | ||
|
|
ebde77008c | ||
|
|
3d27fd04ba | ||
|
|
d9fcaf3473 | ||
|
|
d266f761aa | ||
|
|
1d01405412 | ||
|
|
7c62eb5bd6 | ||
|
|
4dcc76d366 | ||
|
|
d2fad19a11 | ||
|
|
7c92c4c964 | ||
|
|
5a71d33236 | ||
|
|
1b4db4f793 | ||
|
|
c084b22815 | ||
|
|
acacef95cd | ||
|
|
5d722183d3 | ||
|
|
ac19ea5407 | ||
|
|
d19b05b970 | ||
|
|
a0795136ac | ||
|
|
d2566e345a | ||
|
|
66cd7cf90e | ||
|
|
9a599981ef | ||
|
|
f51f7bc82a | ||
|
|
dbcbac0137 | ||
|
|
e722f8a87c | ||
|
|
61679749eb | ||
|
|
23e12c9c44 | ||
|
|
6da78cd3e5 | ||
|
|
78ce8100a3 | ||
|
|
76ba338b45 | ||
|
|
823fe2deb2 | ||
|
|
cb90f692f2 | ||
|
|
0325343ede | ||
|
|
69d1556a1d | ||
|
|
2daa043840 | ||
|
|
f340ca9d05 | ||
|
|
02abd038fa | ||
|
|
b9da68ec28 | ||
|
|
88b3910d80 | ||
|
|
160412f6e4 | ||
|
|
59a86b25fc | ||
|
|
49e58b39f5 | ||
|
|
58e0757bbd | ||
|
|
5ff4197572 | ||
|
|
b56e28d27a | ||
|
|
c3d39e1dd4 | ||
|
|
716aa36bfd | ||
|
|
f01460170e | ||
|
|
a414ce282d | ||
|
|
6c32f3b130 | ||
|
|
4cf907c572 | ||
|
|
b28baaa5aa | ||
|
|
980dea64e0 | ||
|
|
c340f6436f | ||
|
|
54376fd105 | ||
|
|
ef006578b2 | ||
|
|
b0b1ee0c60 | ||
|
|
4e2026aa2d | ||
|
|
e0e50b4bd5 | ||
|
|
c9b52f1310 | ||
|
|
0195213dfb | ||
|
|
d6225cbde3 | ||
|
|
7b4c194b97 | ||
|
|
a5ecff24a3 | ||
|
|
c9c003dc9b | ||
|
|
fd95936219 | ||
|
|
15a3fd4456 | ||
|
|
df896542e4 | ||
|
|
8927e81274 | ||
|
|
340f061827 | ||
|
|
15cbac97c2 | ||
|
|
bb32d0f7d1 | ||
|
|
c370fba9ba | ||
|
|
6e32421172 | ||
|
|
6643687c0a | ||
|
|
ed01e78d77 | ||
|
|
93aed52f88 | ||
|
|
bb6d1fd6a3 | ||
|
|
6e33179fc2 | ||
|
|
277fd167cf | ||
|
|
98e8d5170b | ||
|
|
11ee1651ae | ||
|
|
0dfcf9b1e6 | ||
|
|
08f57ac5bc | ||
|
|
7095e781e9 | ||
|
|
df18b93809 | ||
|
|
0c2e028b38 | ||
|
|
80cb1bc129 | ||
|
|
74c1cb51f6 | ||
|
|
2e864bddf9 | ||
|
|
e60ae91b5d | ||
|
|
d606cd86a0 | ||
|
|
bc463c37f4 | ||
|
|
76c1480903 | ||
|
|
6f312caf8b | ||
|
|
980d8d374f | ||
|
|
c49b34942f | ||
|
|
fcfa8717a5 | ||
|
|
954a265965 | ||
|
|
69845a020a | ||
|
|
22200fd8a7 | ||
|
|
add441675d | ||
|
|
d3d9754277 | ||
|
|
aa5e2edbc5 | ||
|
|
310b099ecf | ||
|
|
1cfaef911c | ||
|
|
b931c5f638 | ||
|
|
7c683668eb | ||
|
|
cab7ac7d58 | ||
|
|
15e69c538a | ||
|
|
31ee938b66 | ||
|
|
e51a8d43d9 | ||
|
|
64cd5b6e4b | ||
|
|
6c9ef34905 | ||
|
|
aa89019236 | ||
|
|
df58fcee16 | ||
|
|
ea3ffc429f | ||
|
|
2efca7a2b5 | ||
|
|
9db448a5e2 | ||
|
|
feee90beef | ||
|
|
906a63b6b5 | ||
|
|
2ce64ac213 | ||
|
|
4d8bf57135 | ||
|
|
c5348ce4b3 | ||
|
|
7f87c03f97 | ||
|
|
9469f148ff | ||
|
|
ffb7dc4ec2 | ||
|
|
242b8fa746 | ||
|
|
50cae5ac3b | ||
|
|
6a71233eb2 | ||
|
|
1aff8933c9 | ||
|
|
0ed87a5dfc | ||
|
|
24a6bcbd1e | ||
|
|
ca7f3da19d | ||
|
|
bf047e2a3c | ||
|
|
4454287be9 | ||
|
|
3bd2183655 | ||
|
|
1f7080e8f8 | ||
|
|
8b20761e8b | ||
|
|
655d0b5d5f | ||
|
|
91849cdd3a | ||
|
|
df25a694c3 | ||
|
|
eabaca145e | ||
|
|
2f0e458765 | ||
|
|
ff8037f231 | ||
|
|
a116028e1b | ||
|
|
e606a02b29 | ||
|
|
531c712ea5 | ||
|
|
3ae7624361 | ||
|
|
fed83462fa | ||
|
|
58c9f937c5 | ||
|
|
5d14b9209d | ||
|
|
305a95fa74 | ||
|
|
b29c1e702a | ||
|
|
b04d75ab08 | ||
|
|
25abfaadb9 | ||
|
|
1df81b8698 | ||
|
|
4487846fd7 | ||
|
|
86918f5160 | ||
|
|
bc723b3f15 | ||
|
|
1881e646d4 | ||
|
|
aa98808a1a | ||
|
|
f9a2232703 | ||
|
|
19d6be8663 | ||
|
|
0eb7c890ad | ||
|
|
7bfa68aa58 | ||
|
|
857a38050e | ||
|
|
c5b7f92caf | ||
|
|
df31ffd7fb | ||
|
|
0df0322d36 | ||
|
|
260552322d | ||
|
|
88ef6496a2 | ||
|
|
bdf123bf7b | ||
|
|
8fc3760eef | ||
|
|
5656f6f709 | ||
|
|
53e7e8b77e | ||
|
|
b990915b7a | ||
|
|
15b7822ffd | ||
|
|
cfa28419cb | ||
|
|
30ef0d2a3a | ||
|
|
755f99200a | ||
|
|
7af79ed3a2 | ||
|
|
2971e14269 | ||
|
|
01954aaf30 | ||
|
|
da018a8f2a | ||
|
|
77400bbbb0 | ||
|
|
3c3333d3df | ||
|
|
4963bd4144 | ||
|
|
b4a418dded | ||
|
|
a724b0daee | ||
|
|
88aa620cb4 | ||
|
|
70d3448110 | ||
|
|
09a1a406a6 | ||
|
|
40939d0b7f | ||
|
|
aec1d184c8 | ||
|
|
69d3cb5dd8 | ||
|
|
3deff162bb | ||
|
|
4ed54568d3 | ||
|
|
004724da55 | ||
|
|
97e9b5ffe3 | ||
|
|
45f920f802 | ||
|
|
2b31532d19 | ||
|
|
e7a6ecf95b | ||
|
|
545c98cee0 | ||
|
|
d29ccbfe37 | ||
|
|
d0807862e6 | ||
|
|
b92616dc14 | ||
|
|
a1a436300d | ||
|
|
16a5aeb1ba | ||
|
|
872095ff7a | ||
|
|
d88f2ea4c3 | ||
|
|
02e0385ab8 | ||
|
|
c9751d4cd9 | ||
|
|
162b637992 | ||
|
|
a10ddd4063 | ||
|
|
f46ccc63a7 | ||
|
|
fc04a45744 | ||
|
|
90c2b59a51 | ||
|
|
d6bee99c1b | ||
|
|
0871d47568 | ||
|
|
5c646c1898 | ||
|
|
8974de165f | ||
|
|
e622294b87 | ||
|
|
cf9d32b556 | ||
|
|
e2d6b5bf64 | ||
|
|
dec58fd6d1 | ||
|
|
dbb2241213 | ||
|
|
3bd8ac5820 | ||
|
|
f514aa676d | ||
|
|
73fc9755dd | ||
|
|
5089c843b6 | ||
|
|
cd527f2bce | ||
|
|
82de234f21 | ||
|
|
ae6f325c0a | ||
|
|
c64bbbe426 | ||
|
|
eafd882a06 | ||
|
|
460ae85226 | ||
|
|
a64b095c13 | ||
|
|
7ea0de3fb8 | ||
|
|
b4c836afbd | ||
|
|
2d0f22b379 | ||
|
|
a8e9668c2b | ||
|
|
425feba0e2 | ||
|
|
c09b8d888f | ||
|
|
748e691a58 | ||
|
|
f8c81ff95f | ||
|
|
d11c4a3cd7 | ||
|
|
3f3ea151ef | ||
|
|
7e2f68870c | ||
|
|
df41cf14da | ||
|
|
111370c025 | ||
|
|
bcb2ba0b1b | ||
|
|
807d526ffa | ||
|
|
2ff9c5fed5 | ||
|
|
d43cd663d2 | ||
|
|
dae91267e8 | ||
|
|
b2d6317a23 | ||
|
|
c49b412e69 | ||
|
|
05e5d73556 | ||
|
|
53620f4b1a | ||
|
|
9d14b03eb1 | ||
|
|
04a5b1bd4f | ||
|
|
31b3f58b2c | ||
|
|
9c173d1de0 | ||
|
|
e11b6d74ed | ||
|
|
c7efe899fa | ||
|
|
adcd68c1ab | ||
|
|
23a4ebb74a | ||
|
|
cccb9a5fec | ||
|
|
b416e3ab3e | ||
|
|
e16b7d65d4 | ||
|
|
3744c64459 |
@@ -1,4 +1,16 @@
|
||||
.DS_Store
|
||||
node_modules
|
||||
dist
|
||||
.routify
|
||||
.pnpm-store
|
||||
/build
|
||||
/.svelte-kit
|
||||
/package
|
||||
/yarn.lock
|
||||
/.pnpm-store
|
||||
/ssl
|
||||
|
||||
.env
|
||||
.env.prod
|
||||
.env.stag
|
||||
/db/*.db
|
||||
/db/*.db-journal
|
||||
/data/haproxy/haproxy.cfg
|
||||
/data/haproxy/haproxy.cfg.lkg
|
||||
@@ -1,35 +1,8 @@
|
||||
####################################
|
||||
# Domain where your Coolify instance will be available and reachable.
|
||||
# It's the same as you set in Github OAuth App and Github App as <domain>.
|
||||
DOMAIN=
|
||||
## Let's Encrypt contact email required
|
||||
EMAIL=
|
||||
|
||||
# JWT Token Sign Key for logging you in to Coolify's frontend
|
||||
JWT_SIGN_KEY=
|
||||
# Encryption key for SECRETS - do NOT share it with others!
|
||||
SECRETS_ENCRYPTION_KEY=
|
||||
|
||||
# Docker Engine
|
||||
DOCKER_ENGINE=/var/run/docker.sock
|
||||
# Docker network to use internally between the proxy and your apps
|
||||
DOCKER_NETWORK=coollabs
|
||||
|
||||
# Mongodb
|
||||
# Values in case if you are using our Mongodb installation - CHANGE user and password fields!
|
||||
MONGODB_HOST=coollabs-mongodb
|
||||
MONGODB_PORT=27017
|
||||
MONGODB_USER=supercooldbuser
|
||||
MONGODB_PASSWORD=developmentPassword4db
|
||||
MONGODB_DB=coolLabs-prod
|
||||
|
||||
# Frontend only variables
|
||||
VITE_GITHUB_APP_CLIENTID=
|
||||
VITE_GITHUB_APP_NAME=
|
||||
|
||||
# Github OAuth & App secrets and private key - you can get it from Github.
|
||||
GITHUB_APP_CLIENT_SECRET=
|
||||
GITHUP_APP_WEBHOOK_SECRET=
|
||||
|
||||
# It should look like this. Newlines breaks with \n
|
||||
GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA7Y+Uwkd8FINSwFktWGdtwCaOAazTDYR8ucEzGyR9r+ooJZhF\nOc32qgDSps6Q5DsqPOzvfhiviqU+et9VF+bJhfdzwJ+Le86QZH1RgsDMoY049XvI\nKSwP........"
|
||||
COOLIFY_APP_ID=
|
||||
COOLIFY_SECRET_KEY=12341234123412341234123412341234
|
||||
COOLIFY_DATABASE_URL=file:../db/dev.db
|
||||
COOLIFY_SENTRY_DSN=
|
||||
COOLIFY_IS_ON=docker
|
||||
COOLIFY_WHITE_LABELED=false
|
||||
COOLIFY_WHITE_LABELED_ICON=
|
||||
COOLIFY_AUTO_UPDATE=false
|
||||
20
.eslintrc.cjs
Normal file
20
.eslintrc.cjs
Normal file
@@ -0,0 +1,20 @@
|
||||
module.exports = {
|
||||
root: true,
|
||||
parser: '@typescript-eslint/parser',
|
||||
extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
|
||||
plugins: ['svelte3', '@typescript-eslint'],
|
||||
ignorePatterns: ['*.cjs'],
|
||||
overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
|
||||
settings: {
|
||||
'svelte3/typescript': () => require('typescript')
|
||||
},
|
||||
parserOptions: {
|
||||
sourceType: 'module',
|
||||
ecmaVersion: 2020
|
||||
},
|
||||
env: {
|
||||
browser: true,
|
||||
es2017: true,
|
||||
node: true
|
||||
}
|
||||
};
|
||||
2
.github/FUNDING.yml
vendored
2
.github/FUNDING.yml
vendored
@@ -1 +1 @@
|
||||
ko_fi: andrasbacsai
|
||||
open_collective: coollabsio
|
||||
|
||||
23
.gitignore
vendored
23
.gitignore
vendored
@@ -1,11 +1,16 @@
|
||||
.vscode
|
||||
.idea
|
||||
.DS_Store
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
.routify
|
||||
/build
|
||||
/.svelte-kit
|
||||
/package
|
||||
/yarn.lock
|
||||
/.pnpm-store
|
||||
/ssl
|
||||
|
||||
.env
|
||||
yarn-error.log
|
||||
api/development/console.log
|
||||
.pnpm-debug.log
|
||||
.pnpm-store
|
||||
.env.prod
|
||||
.env.stag
|
||||
/db/*.db
|
||||
/db/*.db-journal
|
||||
/data/haproxy/haproxy.cfg
|
||||
/data/haproxy/haproxy.cfg.lkg
|
||||
|
||||
1
.husky/_/.gitignore
vendored
Normal file
1
.husky/_/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*
|
||||
4
.husky/pre-commit
Executable file
4
.husky/pre-commit
Executable file
@@ -0,0 +1,4 @@
|
||||
#!/bin/sh
|
||||
. "$(dirname "$0")/_/husky.sh"
|
||||
|
||||
pnpm lint-staged
|
||||
5
.lintstagedrc.json
Normal file
5
.lintstagedrc.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}": [
|
||||
"prettier --ignore-path .gitignore --write --plugin-search-dir=."
|
||||
]
|
||||
}
|
||||
18
.prettierrc
18
.prettierrc
@@ -1,14 +1,6 @@
|
||||
{
|
||||
"arrowParens": "avoid",
|
||||
"bracketSpacing": true,
|
||||
"printWidth": 80,
|
||||
"semi": true,
|
||||
"singleQuote": false,
|
||||
"tabWidth": 2,
|
||||
"trailingComma": "all",
|
||||
"svelteSortOrder" : "styles-scripts-markup",
|
||||
"svelteStrictMode": true,
|
||||
"svelteBracketNewLine": true,
|
||||
"svelteAllowShorthand": true,
|
||||
"plugins": ["prettier-plugin-svelte"]
|
||||
}
|
||||
"useTabs": true,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"printWidth": 100
|
||||
}
|
||||
|
||||
11
.vscode/settings.json
vendored
Normal file
11
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"i18n-ally.localesPaths": ["src/lib/locales"],
|
||||
"i18n-ally.keystyle": "nested",
|
||||
"i18n-ally.extract.ignoredByFiles": {
|
||||
"src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
|
||||
},
|
||||
"i18n-ally.sourceLanguage": "en",
|
||||
"i18n-ally.enabledFrameworks": ["svelte"],
|
||||
"i18n-ally.enabledParsers": ["js", "ts", "json"],
|
||||
"i18n-ally.extract.autoDetect": true
|
||||
}
|
||||
277
CONTRIBUTING.md
Normal file
277
CONTRIBUTING.md
Normal file
@@ -0,0 +1,277 @@
|
||||
# 👋 Welcome
|
||||
|
||||
First of all, thank you for considering contributing to my project! It means a lot 💜.
|
||||
|
||||
## 🙋 Want to help?
|
||||
|
||||
If you are new to contributing on GitHub, you can read [first contributions](https://github.com/firstcontributions/first-contributions) and follow that guide.
|
||||
|
||||
Follow the [introduction](#introduction) to get started then start contributing!
|
||||
|
||||
This is a little list of what you can do to help the project:
|
||||
|
||||
- [🧑💻 Develop your own ideas](#developer-contribution)
|
||||
- [🌐 Translate the project](#translation)
|
||||
|
||||
## 👋 Introduction
|
||||
|
||||
🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS.
|
||||
|
||||
#### Recommended Pull Request Guideline
|
||||
|
||||
- Fork the project
|
||||
- Clone your fork repo to local
|
||||
- Create a new branch
|
||||
- Push to your fork repo
|
||||
- Create a pull request: https://github.com/coollabsio/compare
|
||||
- Write a proper description
|
||||
- Open the pull request to review against `next` branch
|
||||
|
||||
---
|
||||
|
||||
# How to start after you set up your local fork?
|
||||
|
||||
Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
|
||||
|
||||
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
||||
|
||||
#### Setup a local development environment
|
||||
|
||||
- Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
|
||||
- Install dependencies with `pnpm install`.
|
||||
- Need to create a local SQlite database with `pnpm db:push`.
|
||||
- This will apply all migrations at `db/dev.db`.
|
||||
- Seed the database with base entities with `pnpm db:seed`
|
||||
- You can start coding after starting `pnpm dev`.
|
||||
|
||||
#### How to start after you set up your local fork?
|
||||
|
||||
This repository works better with [pnpm](https://pnpm.io) due to the lock file. I recommend you to give it a try and use `pnpm` as well because it is cool and efficient!
|
||||
|
||||
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
||||
|
||||
## 🧑💻 Developer contribution
|
||||
|
||||
### Technical skills required
|
||||
|
||||
- **Languages**: Node.js / Javascript / Typescript
|
||||
- **Framework JS/TS**: Svelte / SvelteKit
|
||||
- **Database ORM**: Prisma.io
|
||||
- **Docker Engine**
|
||||
|
||||
### Database migrations
|
||||
|
||||
During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.
|
||||
|
||||
If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>` where `nameOfMigration` is given by you. Make it meaningful. :)
|
||||
|
||||
### Tricky parts
|
||||
|
||||
- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!
|
||||
|
||||
---
|
||||
|
||||
# How to add new services
|
||||
|
||||
You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:
|
||||
|
||||
- Self-hostable (obviously)
|
||||
- Open-source
|
||||
- Maintained (I do not want to add software full of bugs)
|
||||
|
||||
## Backend
|
||||
|
||||
There are 5 steps you should make on the backend side.
|
||||
|
||||
1. Create Prisma / database schema for the new service.
|
||||
2. Add supported versions of the service.
|
||||
3. Update global functions.
|
||||
4. Create API endpoints.
|
||||
5. Define automatically generated variables.
|
||||
|
||||
> I will use [Umami](https://umami.is/) as an example service.
|
||||
|
||||
### Create Prisma / database schema for the new service.
|
||||
|
||||
You only need to do this if you store passwords or any persistent configuration. Most services require it, but there are some exceptions, like NocoDB.
|
||||
|
||||
Update Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).
|
||||
|
||||
- Add new model with the new service name.
|
||||
- Make a relationship with the `Service` model.
|
||||
- In the `Service` model, the name of the new field should be lowercase.
|
||||
- If the service needs a database, define a `publicPort` field to be able to make its database public; an example field name in the case of PostgreSQL is `postgresqlPublicPort`. It should be an optional field.
|
||||
|
||||
If you are finished with the Prisma schema, you should update the database schema with `pnpm db:push` command.
|
||||
|
||||
> You must restart the running development environment to be able to use the new model
|
||||
|
||||
> If you use VSCode, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running VSCode.
|
||||
|
||||
### Add supported versions
|
||||
|
||||
Supported versions are hardcoded into Coolify (for now).
|
||||
|
||||
You need to update `supportedServiceTypesAndVersions` function at [src/lib/components/common.ts](src/lib/components/common.ts). Example JSON:
|
||||
|
||||
```js
|
||||
{
|
||||
// Name used to identify the service internally
|
||||
name: 'umami',
|
||||
// Fancier name to show to the user
|
||||
fancyName: 'Umami',
|
||||
// Docker base image for the service
|
||||
baseImage: 'ghcr.io/mikecao/umami',
|
||||
// Optional: If there is any dependent image, you should list it here
|
||||
images: [],
|
||||
// Usable tags
|
||||
versions: ['postgresql-latest'],
|
||||
// Which tag is the recommended
|
||||
recommendedVersion: 'postgresql-latest',
|
||||
// Application's default port, Umami listens on 3000
|
||||
ports: {
|
||||
main: 3000
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Update global functions
|
||||
|
||||
1. Add the new service to the `include` variable in [src/lib/database/services.ts](src/lib/database/services.ts), so it will be included in all places in the database queries where it is required.
|
||||
|
||||
```js
|
||||
const include: Prisma.ServiceInclude = {
|
||||
destinationDocker: true,
|
||||
persistentStorage: true,
|
||||
serviceSecret: true,
|
||||
minio: true,
|
||||
plausibleAnalytics: true,
|
||||
vscodeserver: true,
|
||||
wordpress: true,
|
||||
ghost: true,
|
||||
meiliSearch: true,
|
||||
umami: true // This line!
|
||||
};
|
||||
```
|
||||
|
||||
2. Update the database update query with the new service type to `configureServiceType` function in [src/lib/database/services.ts](src/lib/database/services.ts). This function defines the automatically generated variables (passwords, users, etc.) and it's encryption process (if applicable).
|
||||
|
||||
```js
|
||||
[...]
|
||||
else if (type === 'umami') {
|
||||
const postgresqlUser = cuid();
|
||||
const postgresqlPassword = encrypt(generatePassword());
|
||||
const postgresqlDatabase = 'umami';
|
||||
const hashSalt = encrypt(generatePassword(64));
|
||||
await prisma.service.update({
|
||||
where: { id },
|
||||
data: {
|
||||
type,
|
||||
umami: {
|
||||
create: {
|
||||
postgresqlDatabase,
|
||||
postgresqlPassword,
|
||||
postgresqlUser,
|
||||
hashSalt,
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
3. Add decryption process for configurations and passwords to `getService` function in [src/lib/database/services.ts](src/lib/database/services.ts)
|
||||
|
||||
```js
|
||||
if (body.umami?.postgresqlPassword)
|
||||
body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);
|
||||
|
||||
if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
|
||||
```
|
||||
|
||||
4. Add service deletion query to `removeService` function in [src/lib/database/services.ts](src/lib/database/services.ts)
|
||||
|
||||
### Create API endpoints.
|
||||
|
||||
You need to add a new folder under [src/routes/services/[id]](src/routes/services/[id]) with the lowercase name of the service. You need 3 default files in that folder.
|
||||
|
||||
#### `index.json.ts`:
|
||||
|
||||
It has a POST endpoint that updates the service details in Coolify's database, such as name, url, other configurations, like passwords. It should look something like this:
|
||||
|
||||
```js
|
||||
import { getUserDetails } from '$lib/common';
|
||||
import * as db from '$lib/database';
|
||||
import { ErrorHandler } from '$lib/database';
|
||||
import type { RequestHandler } from '@sveltejs/kit';
|
||||
|
||||
export const post: RequestHandler = async (event) => {
|
||||
const { status, body } = await getUserDetails(event);
|
||||
if (status === 401) return { status, body };
|
||||
|
||||
const { id } = event.params;
|
||||
|
||||
let { name, fqdn } = await event.request.json();
|
||||
if (fqdn) fqdn = fqdn.toLowerCase();
|
||||
|
||||
try {
|
||||
await db.updateService({ id, fqdn, name });
|
||||
return { status: 201 };
|
||||
} catch (error) {
|
||||
return ErrorHandler(error);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
If it's necessary, you can create your own database update function, specifically for the new service.
|
||||
|
||||
#### `start.json.ts`
|
||||
|
||||
It has a POST endpoint that sets all the required secrets, persistent volumes, `docker-compose.yaml` file and sends a request to the specified docker engine.
|
||||
|
||||
You could also define an `HTTP` or `TCP` proxy for every other port that should be proxied to your server. (See `startHttpProxy` and `startTcpProxy` functions in [src/lib/haproxy/index.ts](src/lib/haproxy/index.ts))
|
||||
|
||||
#### `stop.json.ts`
|
||||
|
||||
It has a POST endpoint that stops the service and all dependent (TCP/HTTP proxy) containers. If `publicPort` is specified, it also needs to be cleaned up from the database.
|
||||
|
||||
## Frontend
|
||||
|
||||
1. You need to add a custom logo at [src/lib/components/svg/services/](src/lib/components/svg/services/) as a svelte component.
|
||||
|
||||
SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
|
||||
|
||||
2. You need to include the logo at
|
||||
|
||||
- [src/routes/services/index.svelte](src/routes/services/index.svelte) with `isAbsolute` in two places,
|
||||
- [src/lib/components/ServiceLinks.svelte](src/lib/components/ServiceLinks.svelte) with `isAbsolute` and a link to the docs/main site of the service
|
||||
- [src/routes/services/[id]/configuration/type.svelte](src/routes/services/[id]/configuration/type.svelte) with `isAbsolute`.
|
||||
|
||||
3. By default the URL and the name frontend forms are included in [src/routes/services/[id]/\_Services/\_Services.svelte](src/routes/services/[id]/_Services/_Services.svelte).
|
||||
|
||||
If you need to show more details on the frontend, such as users/passwords, you need to add Svelte component to [src/routes/services/[id]/\_Services](src/routes/services/[id]/_Services) with an underscore. For example, see other files in that folder.
|
||||
|
||||
You also need to add the new inputs to the `index.json.ts` file of the specific service, like for MinIO here: [src/routes/services/[id]/minio/index.json.ts](src/routes/services/[id]/minio/index.json.ts)
|
||||
|
||||
## 🌐 Translate the project
|
||||
|
||||
The project uses [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) for translations.
|
||||
It follows the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) to name languages.
|
||||
|
||||
### Installation
|
||||
|
||||
You must have gone through all the [introduction](#introduction) steps before you can start translating.
|
||||
|
||||
This is only a suggestion, but I recommend that you use:
|
||||
|
||||
- Visual Studio Code
|
||||
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal to see the progress of the translation.
|
||||
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get the syntax color for the project
|
||||
|
||||
### Adding a language
|
||||
|
||||
If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the step below:
|
||||
|
||||
1. In `src/lib/locales/`, Copy paste `en.json` and rename it with your language (eg: `cz.json`).
|
||||
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO of your language": "Language",` (eg: `"cz": "Czech",`).
|
||||
3. Have fun translating!
|
||||
42
Dockerfile
Normal file
42
Dockerfile
Normal file
@@ -0,0 +1,42 @@
|
||||
# --- Stage 1: install JS dependencies with pnpm ---
FROM node:16.14.2-alpine as install
WORKDIR /app

RUN apk add --no-cache curl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm

COPY package*.json .
RUN pnpm install

# --- Stage 2: runtime image ---
FROM node:16.14.2-alpine
# Set automatically by buildx (e.g. linux/amd64); used to pick CLI binaries below.
ARG TARGETPLATFORM

WORKDIR /app

# Point Prisma at the prebuilt engine binaries copied in below instead of
# letting it download engines at build/run time.
ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
    PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
    PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
    PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
    PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
    PRISMA_CLIENT_ENGINE_TYPE=binary

COPY --from=coollabsio/prisma-engine:latest /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/

# Reuse the dependency install from stage 1, then add the sources.
COPY --from=install /app/node_modules ./node_modules
COPY . .

# Runtime tooling: git for cloning deployments, docker CLI + compose plugin
# to drive the host's Docker engine, plus misc. utilities.
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker

RUN pnpm prisma generate
RUN pnpm build

EXPOSE 3000
CMD ["pnpm", "start"]
|
||||
140
README.md
140
README.md
@@ -1,93 +1,109 @@
|
||||
# About
|
||||
# Coolify
|
||||
|
||||
https://andrasbacsai.com/farewell-netlify-and-heroku-after-3-days-of-coding
|
||||
An open-source & self-hostable Heroku / Netlify alternative.
|
||||
|
||||
# Features
|
||||
- Deploy your Node.js, static sites, PHP or any custom application (with custom Dockerfile) just by pushing code to git.
|
||||
- Hassle-free installation and upgrade process.
|
||||
- One-click MongoDB, MySQL, PostgreSQL, CouchDB deployments!
|
||||
## Live Demo
|
||||
|
||||
# Upcoming features
|
||||
- Backups & monitoring.
|
||||
- User analytics with privacy in mind.
|
||||
- And much more (see [Roadmap](https://github.com/coollabsio/coolify/projects/1)).
|
||||
https://demo.coolify.io/
|
||||
|
||||
(If it is unresponsive, that means someone overloaded the server. 🙃)
|
||||
|
||||
# FAQ
|
||||
Q: What is a buildpack?
|
||||
## How to install
|
||||
|
||||
A: It defines your application's final form.
|
||||
`Static` means that it will be hosted as a static site.
|
||||
`NodeJs` means that it will be started as a node application.
|
||||
Installation is automated with the following command:
|
||||
|
||||
# Screenshots
|
||||
```bash
|
||||
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
|
||||
```
|
||||
|
||||
[Login](https://coollabs.io/coolify/login.jpg)
|
||||
If you would like no questions during installation:
|
||||
|
||||
[Applications](https://coollabs.io/coolify/applications.jpg)
|
||||
```bash
|
||||
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
|
||||
```
|
||||
|
||||
[Databases](https://coollabs.io/coolify/databases.jpg)
|
||||
For more details, go to the [docs](https://docs.coollabs.io/coolify/installation).
|
||||
|
||||
[Configuration](https://coollabs.io/coolify/configuration.jpg)
|
||||
## Features
|
||||
|
||||
[Settings](https://coollabs.io/coolify/settings.jpg)
|
||||
### Git Sources
|
||||
|
||||
[Logs](https://coollabs.io/coolify/logs.jpg)
|
||||
You can use the following Git Sources to be auto-deployed to your Coolify instance! (Self-hosted versions are also supported.)
|
||||
|
||||
# Getting Started
|
||||
- Github
|
||||
- GitLab
|
||||
- Bitbucket (WIP)
|
||||
|
||||
Automatically: `/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"`
|
||||
### Destinations
|
||||
|
||||
Manually:
|
||||
### Requirements before installation
|
||||
- [Docker](https://docs.docker.com/engine/install/) version 20+
|
||||
- Docker in [swarm mode enabled](https://docs.docker.com/engine/reference/commandline/swarm_init/) (should be set manually before installation)
|
||||
- A [MongoDB](https://docs.mongodb.com/manual/installation/) instance.
|
||||
- We have a [simple installation](https://github.com/coollabsio/infrastructure/tree/main/mongo) if you need one
|
||||
- A configured DNS entry (see `.env.template`)
|
||||
- [Github App](https://docs.github.com/en/developers/apps/creating-a-github-app)
|
||||
You can deploy your applications to the following destinations:
|
||||
|
||||
- GitHub App name: could be anything weird
|
||||
- Homepage URL: https://yourdomain
|
||||
- Local Docker Engine
|
||||
- Remote Docker Engine (WIP)
|
||||
- Kubernetes (WIP)
|
||||
|
||||
Identifying and authorizing users:
|
||||
- Callback URL: https://yourdomain/api/v1/login/github/app
|
||||
- Request user authorization (OAuth) during installation -> Check!
|
||||
### Applications
|
||||
|
||||
Webhook:
|
||||
- Active -> Check!
|
||||
- Webhook URL: https://yourdomain/api/v1/webhooks/deploy
|
||||
- Webhook Secret: it should be super secret
|
||||
These are the predefined build packs, but with the Docker build pack, you can host anything that is hostable with a single Dockerfile.
|
||||
|
||||
Repository permissions:
|
||||
- Contents: Read-only
|
||||
- Metadata: Read-only
|
||||
|
||||
User permissions:
|
||||
- Email: Read-only
|
||||
- Static sites
|
||||
- NodeJS
|
||||
- VueJS
|
||||
- NuxtJS
|
||||
- NextJS
|
||||
- React/Preact
|
||||
- NextJS
|
||||
- Gatsby
|
||||
- Svelte
|
||||
- PHP
|
||||
- Laravel
|
||||
- Rust
|
||||
- Docker
|
||||
|
||||
Subscribe to events:
|
||||
- Push -> Check!
|
||||
### Databases
|
||||
|
||||
### Installation
|
||||
- Clone this repository: `git clone git@github.com:coollabsio/coolify.git`
|
||||
- Set `.env` (see `.env.template`)
|
||||
- Installation: `bash install.sh all`
|
||||
One-click database is ready to be used internally or shared over the internet:
|
||||
|
||||
## Manual updating process (You probably never need to do this!)
|
||||
### Update everything (proxy+coolify)
|
||||
- `bash install.sh all`
|
||||
- MongoDB
|
||||
- MySQL
|
||||
- PostgreSQL
|
||||
- CouchDB
|
||||
- Redis
|
||||
|
||||
### Update coolify only
|
||||
- `bash install.sh coolify`
|
||||
### One-click services
|
||||
|
||||
### Update proxy only
|
||||
- `bash install.sh proxy`
|
||||
You can host cool open-source services as well:
|
||||
|
||||
- [WordPress](https://wordpress.org)
|
||||
- [Ghost](https://ghost.org)
|
||||
- [Plausible Analytics](https://plausible.io)
|
||||
- [NocoDB](https://nocodb.com)
|
||||
- [VSCode Server](https://github.com/cdr/code-server)
|
||||
- [MinIO](https://min.io)
|
||||
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
|
||||
- [LanguageTool](https://languagetool.org)
|
||||
- [n8n](https://n8n.io)
|
||||
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
|
||||
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
|
||||
- [Umami](https://github.com/mikecao/umami)
|
||||
- [Fider](https://fider.io)
|
||||
- [Hasura](https://hasura.io)
|
||||
|
||||
## Migration from v1
|
||||
|
||||
A fresh installation is necessary. v2 is not compatible with v1.
|
||||
|
||||
## Support
|
||||
|
||||
# Contact
|
||||
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
|
||||
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
|
||||
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
|
||||
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)
|
||||
|
||||
## Contribute
|
||||
|
||||
See [our contribution guide](./CONTRIBUTING.md).
|
||||
|
||||
## License
|
||||
|
||||
# License
|
||||
This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Please see the [LICENSE](/LICENSE) file in our repository for the full text.
|
||||
|
||||
28
api/app.js
28
api/app.js
@@ -1,28 +0,0 @@
|
||||
module.exports = async function (fastify, opts) {
|
||||
// Private routes
|
||||
fastify.register(async function (server) {
|
||||
server.register(require('./plugins/authentication'))
|
||||
server.register(require('./routes/v1/upgrade'), { prefix: '/upgrade' })
|
||||
server.register(require('./routes/v1/settings'), { prefix: '/settings' })
|
||||
server.register(require('./routes/v1/dashboard'), { prefix: '/dashboard' })
|
||||
server.register(require('./routes/v1/config'), { prefix: '/config' })
|
||||
server.register(require('./routes/v1/application/remove'), { prefix: '/application/remove' })
|
||||
server.register(require('./routes/v1/application/logs'), { prefix: '/application/logs' })
|
||||
server.register(require('./routes/v1/application/check'), { prefix: '/application/check' })
|
||||
server.register(require('./routes/v1/application/deploy'), { prefix: '/application/deploy' })
|
||||
server.register(require('./routes/v1/application/deploy/logs'), { prefix: '/application/deploy/logs' })
|
||||
server.register(require('./routes/v1/databases'), { prefix: '/databases' })
|
||||
server.register(require('./routes/v1/server'), { prefix: '/server' })
|
||||
})
|
||||
// Public routes
|
||||
fastify.register(require('./routes/v1/verify'), { prefix: '/verify' })
|
||||
fastify.register(require('./routes/v1/login/github'), {
|
||||
prefix: '/login/github'
|
||||
})
|
||||
fastify.register(require('./routes/v1/webhooks/deploy'), {
|
||||
prefix: '/webhooks/deploy'
|
||||
})
|
||||
fastify.register(require('./routes/v1/undead'), {
|
||||
prefix: '/undead'
|
||||
})
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { streamEvents, docker } = require('../../libs/docker')
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
|
||||
if (fs.stat(`${path}/Dockerfile`)) {
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: path },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
} else {
|
||||
throw new Error('No custom dockerfile found.')
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { streamEvents, docker } = require('../libs/docker')
|
||||
const buildImageNodeDocker = (configuration) => {
|
||||
return [
|
||||
'FROM node:lts',
|
||||
'WORKDIR /usr/src/app',
|
||||
`COPY ${configuration.build.directory}/package*.json ./`,
|
||||
configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
|
||||
`COPY ./${configuration.build.directory} ./`,
|
||||
`RUN ${configuration.build.command.build}`
|
||||
].join('\n')
|
||||
}
|
||||
async function buildImage (configuration) {
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
buildImage
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
// Registry of available build packs, keyed by `configuration.build.pack`.
// NOTE: `static` is a reserved word in strict-mode JavaScript; use a safe
// local identifier while keeping the exported key name unchanged.
const staticPack = require('./static')
const nodejs = require('./nodejs')
const php = require('./php')
const custom = require('./custom')
const rust = require('./rust')

module.exports = { static: staticPack, nodejs, php, custom, rust }
|
||||
@@ -1,28 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { buildImage } = require('../helpers')
|
||||
const { streamEvents, docker } = require('../../libs/docker')
|
||||
// `HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost:${configuration.publish.port}${configuration.publish.path} || exit 1`,
|
||||
const publishNodejsDocker = (configuration) => {
|
||||
return [
|
||||
'FROM node:lts',
|
||||
'WORKDIR /usr/src/app',
|
||||
configuration.build.command.build
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
|
||||
: `
|
||||
COPY ${configuration.build.directory}/package*.json ./
|
||||
RUN ${configuration.build.command.installation}
|
||||
COPY ./${configuration.build.directory} ./`,
|
||||
`EXPOSE ${configuration.publish.port}`,
|
||||
'CMD [ "yarn", "start" ]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { streamEvents, docker } = require('../../libs/docker')
|
||||
// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
|
||||
const publishPHPDocker = (configuration) => {
|
||||
return [
|
||||
'FROM php:apache',
|
||||
'RUN a2enmod rewrite',
|
||||
'WORKDIR /usr/src/app',
|
||||
`COPY ./${configuration.build.directory} /var/www/html`,
|
||||
'EXPOSE 80',
|
||||
' CMD ["apache2-foreground"]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')
const { execShellAsync } = require('../../libs/common')
const TOML = require('@iarna/toml')

// Multi-stage Dockerfile for the final Rust image: reuses the dependency
// cache image (<name>:cache) built by cacheRustDocker, compiles the release
// binary, and copies it into a slim Debian runtime image.
const publishRustDocker = (configuration, custom) => {
  return [
    'FROM rust:latest',
    'WORKDIR /app',
    `COPY --from=${configuration.build.container.name}:cache /app/target target`,
    `COPY --from=${configuration.build.container.name}:cache /usr/local/cargo /usr/local/cargo`,
    'COPY . .',
    `RUN cargo build --release --bin ${custom.name}`,
    'FROM debian:buster-slim',
    'WORKDIR /app',
    'RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*',
    'RUN update-ca-certificates',
    `COPY --from=${configuration.build.container.name}:cache /app/target/release/${custom.name} ${custom.name}`,
    `EXPOSE ${configuration.publish.port}`,
    `CMD ["/app/${custom.name}"]`
  ].join('\n')
}

// Dockerfile that pre-builds the dependency graph with cargo-chef so later
// builds of the same project reuse the compiled dependencies.
const cacheRustDocker = (configuration, custom) => {
  return [
    `FROM rust:latest AS planner-${configuration.build.container.name}`,
    'WORKDIR /app',
    'RUN cargo install cargo-chef',
    'COPY . .',
    'RUN cargo chef prepare --recipe-path recipe.json',
    'FROM rust:latest',
    'WORKDIR /app',
    'RUN cargo install cargo-chef',
    `COPY --from=planner-${configuration.build.container.name} /app/recipe.json recipe.json`,
    'RUN cargo chef cook --release --recipe-path recipe.json'
  ].join('\n')
}

// Build pack for Rust applications. Reads the crate name from Cargo.toml,
// builds the cargo-chef dependency-cache image first, then the final image.
module.exports = async function (configuration) {
  const cargoToml = await execShellAsync(`cat ${configuration.general.workdir}/Cargo.toml`)
  const parsedToml = TOML.parse(cargoToml)
  const custom = {
    // Binary name passed to `cargo build --bin` and used as the CMD.
    name: parsedToml.package.name
  }
  // First pass: build (or refresh) the dependency cache image.
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, cacheRustDocker(configuration, custom))

  let stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:cache` }
  )
  await streamEvents(stream, configuration)

  // Second pass: build the final runtime image on top of the cache.
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishRustDocker(configuration, custom))

  stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
|
||||
@@ -1,28 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { buildImage } = require('../helpers')
|
||||
const { streamEvents, docker } = require('../../libs/docker')
|
||||
|
||||
// 'HEALTHCHECK --timeout=10s --start-period=10s --interval=5s CMD curl -I -s -f http://localhost/ || exit 1',
|
||||
const publishStaticDocker = (configuration) => {
|
||||
return [
|
||||
'FROM nginx:stable-alpine',
|
||||
'COPY nginx.conf /etc/nginx/nginx.conf',
|
||||
'WORKDIR /usr/share/nginx/html',
|
||||
configuration.build.command.build
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
|
||||
: `COPY ./${configuration.build.directory} ./`,
|
||||
'EXPOSE 80',
|
||||
'CMD ["nginx", "-g", "daemon off;"]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))
|
||||
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
const mongoose = require('mongoose')
const { MongoMemoryServer } = require('mongodb-memory-server-core')

// Boots an in-memory MongoDB (fixed port 27017, dbName 'coolify',
// wiredTiger engine) and connects mongoose to it. Runs at require time.
const mongoServer = new MongoMemoryServer({
  instance: {
    port: 27017,
    dbName: 'coolify',
    storageEngine: 'wiredTiger'
  },
  binary: {
    version: '4.4.3'
  }
})

mongoose.Promise = Promise
mongoServer.getUri().then((mongoUri) => {
  const mongooseOpts = {
    useNewUrlParser: true,
    useUnifiedTopology: true
  }

  mongoose.connect(mongoUri, mongooseOpts)

  mongoose.connection.on('error', (e) => {
    // BUG FIX: `e.message` is a string, so the original `e.message.code`
    // was always undefined and the reconnect branch never ran. Check the
    // error's own `code` property instead.
    if (e.code === 'ETIMEDOUT') {
      console.log(e)
      mongoose.connect(mongoUri, mongooseOpts)
    }
    console.log(e)
  })

  mongoose.connection.once('open', () => {
    console.log(`Started in-memory mongodb ${mongoUri}`)
  })
})
|
||||
@@ -1,28 +0,0 @@
|
||||
const packs = require('../../../buildPacks')
|
||||
const { saveAppLog } = require('../../logging')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const deployId = configuration.general.deployId
|
||||
|
||||
const execute = packs[configuration.build.pack]
|
||||
if (execute) {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
|
||||
await saveAppLog('### Building application.', configuration)
|
||||
await execute(configuration)
|
||||
await saveAppLog('### Building done.', configuration)
|
||||
} else {
|
||||
try {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
} catch (error) {
|
||||
// Hmm.
|
||||
}
|
||||
throw new Error('No buildpack found.')
|
||||
}
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
const { docker } = require('../../docker')
const { execShellAsync } = require('../../common')
const Deployment = require('../../../models/Deployment')

// Removes stopped containers and unused images, keeping any image labelled
// coolify-reserve=true.
async function purgeImagesContainers () {
  await execShellAsync('docker container prune -f')
  await execShellAsync('docker image prune -f --filter=label!=coolify-reserve=true')
}

// Marks deployments stuck in 'queued'/'inprogress' as 'failed' — e.g. after
// a crash or restart of the coolify server.
async function cleanupStuckedDeploymentsInDB () {
  // Cleanup stucked deployments.
  await Deployment.updateMany(
    { progress: { $in: ['queued', 'inprogress'] } },
    { progress: 'failed' }
  )
}

// Removes every running coolify application stack that deploys the same
// repository + branch as the given configuration.
async function deleteSameDeployments (configuration) {
  const services = (await docker.engine.listServices())
    .filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
  // BUG FIX: the original `await array.map(async ...)` awaited the array of
  // promises itself (a no-op), so callers did not actually wait for the old
  // stacks to be removed. Promise.all waits for every removal.
  await Promise.all(services.map(async s => {
    const running = JSON.parse(s.Spec.Labels.configuration)
    if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
      await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
    }
  }))
}

module.exports = { cleanupStuckedDeploymentsInDB, deleteSameDeployments, purgeImagesContainers }
|
||||
@@ -1,136 +0,0 @@
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const cuid = require('cuid')
|
||||
const crypto = require('crypto')
|
||||
const { docker } = require('../docker')
|
||||
const { execShellAsync } = require('../common')
|
||||
|
||||
// Returns a human-friendly random nickname (two words drawn from the
// adjectives/animals/colors dictionaries) used to label a deployment.
function getUniq () {
  return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
}
|
||||
|
||||
// Fills in all derived and defaulted fields of a deployment configuration:
// container name (a hash of the repository descriptor), nickname, deploy id,
// working directory, publish defaults and the base docker service
// configuration (whose SHA is stored for change detection).
// Mutates and returns the given configuration object.
function setDefaultConfiguration (configuration) {
  const nickname = getUniq()
  const deployId = cuid()

  // Container name is derived solely from the repository descriptor so the
  // same repo/branch always maps to the same service name.
  const repoDescriptor = JSON.stringify({ repository: configuration.repository })
  const repoSha = crypto.createHash('sha256').update(repoDescriptor).digest('hex')

  const baseServiceConfiguration = {
    replicas: 1,
    restart_policy: {
      condition: 'any',
      max_attempts: 3
    },
    update_config: {
      parallelism: 1,
      delay: '10s',
      order: 'start-first'
    },
    rollback_config: {
      parallelism: 1,
      delay: '10s',
      order: 'start-first',
      failure_action: 'rollback'
    }
  }

  configuration.build.container.name = repoSha.slice(0, 15)

  configuration.general.nickname = nickname
  configuration.general.deployId = deployId
  configuration.general.workdir = `/tmp/${deployId}`

  if (!configuration.publish.path) configuration.publish.path = '/'
  if (!configuration.publish.port) {
    // Sensible default port per build pack.
    const defaultPorts = { php: 80, static: 80, nodejs: 3000, rust: 3000 }
    const defaultPort = defaultPorts[configuration.build.pack]
    if (defaultPort) configuration.publish.port = defaultPort
  }

  // Normalize directories: never null/undefined, never a leading slash.
  if (!configuration.build.directory) configuration.build.directory = ''
  if (configuration.build.directory.startsWith('/')) configuration.build.directory = configuration.build.directory.replace('/', '')

  if (!configuration.publish.directory) configuration.publish.directory = ''
  if (configuration.publish.directory.startsWith('/')) configuration.publish.directory = configuration.publish.directory.replace('/', '')

  if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
    if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
  }

  // Hash of the base service configuration, used by precheckDeployment to
  // force an update when these defaults change between coolify versions.
  configuration.build.container.baseSHA = crypto.createHash('sha256').update(JSON.stringify(baseServiceConfiguration)).digest('hex')
  configuration.baseServiceConfiguration = baseServiceConfiguration

  return configuration
}
|
||||
|
||||
// Finds the running swarm service belonging to this repository/branch and
// rewrites its `configuration` label (and stack image label) so the stored
// configuration reflects the latest deploy attempt.
async function updateServiceLabels (configuration) {
  // In case of any failure during deployment, still update the current configuration.
  const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
  const found = services.find(s => {
    const config = JSON.parse(s.Spec.Labels.configuration)
    if (config.repository.id === configuration.repository.id && config.repository.branch === configuration.repository.branch) {
      return config
    }
    return null
  })
  if (found) {
    const { ID } = found
    // Merge the stored label configuration with the new one (new values win).
    const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
    await execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
  }
}
|
||||
|
||||
// Compares the incoming deployment configuration against the services
// already running in the swarm and reports what kind of update is needed.
// Returns { foundService, imageChanged, configChanged, forceUpdate };
// a forced update supersedes the finer-grained change flags.
async function precheckDeployment ({ services, configuration }) {
  let foundService = false
  let configChanged = false
  let imageChanged = false

  let forceUpdate = false

  for (const service of services) {
    const running = JSON.parse(service.Spec.Labels.configuration)
    if (running) {
      if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
        // Base service configuration changed
        if (!running.build.container.baseSHA || running.build.container.baseSHA !== configuration.build.container.baseSHA) {
          forceUpdate = true
        }
        // If the deployment is in error state, forceUpdate
        const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
        const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running' && n.Image.split(':')[1] === running.build.container.tag)
        if (isError.length > 0) forceUpdate = true
        foundService = true

        // Compare build/publish settings with the container sub-object
        // stripped out, since container name/tag differ between deploys.
        const runningWithoutContainer = JSON.parse(JSON.stringify(running))
        delete runningWithoutContainer.build.container

        const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
        delete configurationWithoutContainer.build.container

        // If only the configuration changed
        if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
        // If only the image changed
        if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
        // If build pack changed, forceUpdate the service
        if (running.build.pack !== configuration.build.pack) forceUpdate = true
      }
    }
  }
  // A forced update makes the finer-grained flags irrelevant.
  if (forceUpdate) {
    imageChanged = false
    configChanged = false
  }
  return {
    foundService,
    imageChanged,
    configChanged,
    forceUpdate
  }
}
module.exports = { setDefaultConfiguration, updateServiceLabels, precheckDeployment }
|
||||
@@ -1,64 +0,0 @@
|
||||
const fs = require('fs').promises
// Writes build-pack specific support files into the deployment working
// directory before the image build: a rewrite-all .htaccess for PHP apps
// and an nginx.conf for static sites.
module.exports = async function (configuration) {
  try {
    // TODO: Write full .dockerignore for all deployments!!
    if (configuration.build.pack === 'php') {
      // Apache rewrite rules: send every request that is not an existing
      // file or directory to index.php.
      await fs.writeFile(`${configuration.general.workdir}/.htaccess`, `
RewriteEngine On
RewriteBase /
RewriteCond %{REQUEST_FILENAME} !-d
RewriteCond %{REQUEST_FILENAME} !-f
RewriteRule ^(.+)$ index.php [QSA,L]
`)
    }
    // await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
    if (configuration.build.pack === 'static') {
      // Minimal nginx config: serve /usr/share/nginx/html with an SPA-style
      // try_files fallback to /index.html.
      await fs.writeFile(
        `${configuration.general.workdir}/nginx.conf`,
        `user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
worker_connections 1024;
}

http {
include /etc/nginx/mime.types;

access_log off;
sendfile on;
#tcp_nopush on;
keepalive_timeout 65;

server {
listen 80;
server_name localhost;

location / {
root /usr/share/nginx/html;
index index.html;
try_files $uri $uri/index.html $uri/ /index.html =404;
}

error_page 404 /50x.html;

# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}

}

}
`
      )
    }
  } catch (error) {
    // NOTE(review): `new Error(error)` stringifies the original error and
    // loses its stack; rethrowing `error` directly may be preferable.
    throw new Error(error)
  }
}
|
||||
@@ -1,76 +0,0 @@
|
||||
const yaml = require('js-yaml')
const fs = require('fs').promises
const { execShellAsync } = require('../../common')
const { docker } = require('../../docker')
const { saveAppLog } = require('../../logging')
const { deleteSameDeployments } = require('../cleanup')

// Publishes a built application image as a docker swarm stack behind the
// Traefik proxy. If only the image changed, the running service is updated
// in place; otherwise any previous stack for the same repository/branch is
// removed and the stack is (re)deployed from a generated stack.yml.
module.exports = async function (configuration, imageChanged) {
  // Publish secrets as plain environment variables on the service.
  const generateEnvs = {}
  for (const secret of configuration.publish.secrets) {
    generateEnvs[secret.name] = secret.value
  }
  const containerName = configuration.build.container.name

  // Only save SHA256 of it in the configuration label
  // NOTE(review): this mutates the caller's configuration object.
  const baseServiceConfiguration = configuration.baseServiceConfiguration
  delete configuration.baseServiceConfiguration

  // docker-compose v3.8 stack definition; the labels drive Traefik routing
  // (host + path rule, TLS via letsencrypt, global compression middleware).
  const stack = {
    version: '3.8',
    services: {
      [containerName]: {
        image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
        networks: [`${docker.network}`],
        environment: generateEnvs,
        deploy: {
          ...baseServiceConfiguration,
          labels: [
            'managedBy=coolify',
            'type=application',
            'configuration=' + JSON.stringify(configuration),
            'traefik.enable=true',
            'traefik.http.services.' +
              configuration.build.container.name +
              `.loadbalancer.server.port=${configuration.publish.port}`,
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.entrypoints=websecure',
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.rule=Host(`' +
              configuration.publish.domain +
              '`) && PathPrefix(`' +
              configuration.publish.path +
              '`)',
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.tls.certresolver=letsencrypt',
            'traefik.http.routers.' +
              configuration.build.container.name +
              '.middlewares=global-compress'
          ]
        }
      }
    },
    networks: {
      [`${docker.network}`]: {
        external: true
      }
    }
  }
  await saveAppLog('### Publishing.', configuration)
  await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
  if (imageChanged) {
    // console.log('image changed')
    // Same stack, new image tag: update the running service in place.
    await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
  } else {
    // console.log('new deployment or force deployment or config changed')
    // Remove any stack already serving this repo/branch, then deploy fresh.
    await deleteSameDeployments(configuration)
    await execShellAsync(
      `cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
    )
  }

  await saveAppLog('### Published done!', configuration)
}
|
||||
@@ -1,38 +0,0 @@
|
||||
const jwt = require('jsonwebtoken')
|
||||
const axios = require('axios')
|
||||
const { execShellAsync, cleanupTmp } = require('../../common')
|
||||
|
||||
// Clone the configured GitHub repository into the work directory using a
// short-lived GitHub App installation token, then record the short commit
// SHA as the container tag.
// Relies on module-level `jwt` (jsonwebtoken), `axios`, `execShellAsync`.
module.exports = async function (configuration) {
  const { workdir } = configuration.general
  const { organization, name, branch } = configuration.repository
  const github = configuration.github

  // The private key arrives env-escaped: restore real newlines and strip quotes.
  const githubPrivateKey = process.env.GITHUB_APP_PRIVATE_KEY.replace(/\\n/g, '\n').replace(/"/g, '')

  // GitHub App JWT: valid for 60 seconds, issued by the App id.
  const payload = {
    iat: Math.round(new Date().getTime() / 1000),
    exp: Math.round(new Date().getTime() / 1000 + 60),
    // NOTE(review): parseInt without an explicit radix 10 — works for plain
    // numeric ids, but worth confirming.
    iss: parseInt(github.app.id)
  }

  // GitHub Apps require RS256-signed JWTs.
  const jwtToken = jwt.sign(payload, githubPrivateKey, {
    algorithm: 'RS256'
  })
  // Exchange the app JWT for an installation access token.
  const accessToken = await axios({
    method: 'POST',
    url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
    data: {},
    headers: {
      Authorization: 'Bearer ' + jwtToken,
      Accept: 'application/vnd.github.machine-man-preview+json'
    }
  })
  // The token is embedded in the clone URL (x-access-token scheme).
  await execShellAsync(
    `mkdir -p ${workdir} && git clone -q -b ${branch} https://x-access-token:${accessToken.data.token}@github.com/${organization}/${name}.git ${workdir}/`
  )
  // Tag = first 7 chars of HEAD; replace() removes the trailing newline
  // from the shell output before slicing.
  configuration.build.container.tag = (
    await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
  )
    .replace('\n', '')
    .slice(0, 7)
}
|
||||
@@ -1,32 +0,0 @@
|
||||
const dayjs = require('dayjs')
|
||||
|
||||
const { cleanupTmp } = require('../common')
|
||||
|
||||
const { saveAppLog } = require('../logging')
|
||||
const copyFiles = require('./deploy/copyFiles')
|
||||
const buildContainer = require('./build/container')
|
||||
const deploy = require('./deploy/deploy')
|
||||
const Deployment = require('../../models/Deployment')
|
||||
const { purgeImagesContainers } = require('./cleanup')
|
||||
const { updateServiceLabels } = require('./configuration')
|
||||
|
||||
// Full deployment pipeline for one application:
// record the deployment → copy sources → build image → deploy stack →
// mark deployment done → refresh service labels → clean temp dir → purge
// dangling images/containers. Errors propagate to the caller unwrapped.
async function queueAndBuild (configuration, imageChanged) {
  const { id, organization, name, branch } = configuration.repository
  const { domain } = configuration.publish
  const { deployId, nickname, workdir } = configuration.general
  // Persist a 'queued' Deployment row first so progress is visible in the UI.
  await new Deployment({
    repoId: id, branch, deployId, domain, organization, name, nickname
  }).save()
  await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
  await copyFiles(configuration)
  await buildContainer(configuration)
  await deploy(configuration, imageChanged)
  // Flip the same row to 'done' (match on the identifying fields).
  await Deployment.findOneAndUpdate(
    { repoId: id, branch, deployId, organization, name, domain },
    { repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
  await updateServiceLabels(configuration)
  cleanupTmp(workdir)
  await purgeImagesContainers()
}

module.exports = { queueAndBuild }
|
||||
@@ -1,98 +0,0 @@
|
||||
const crypto = require('crypto')
|
||||
const shell = require('shelljs')
|
||||
const jsonwebtoken = require('jsonwebtoken')
|
||||
const { docker } = require('./docker')
|
||||
const User = require('../models/User')
|
||||
const algorithm = 'aes-256-cbc'
|
||||
const key = process.env.SECRETS_ENCRYPTION_KEY
|
||||
|
||||
// Resolve with the string 'OK' after `t` milliseconds (promise-based sleep).
function delay (t) {
  return new Promise((resolve) => setTimeout(() => resolve('OK'), t))
}
|
||||
|
||||
// Validate a `Bearer <jwt>` Authorization header and confirm the token's
// `jti` claim matches a known user. Returns true only when both checks pass;
// any failure (malformed header, bad signature, unknown user) yields false.
async function verifyUserId (authorization) {
  try {
    const [, token] = authorization.split(' ')
    const { jti } = jsonwebtoken.verify(token, process.env.JWT_SIGN_KEY)
    const user = await User.findOne({ uid: jti })
    return Boolean(user)
  } catch (error) {
    return false
  }
}
|
||||
// Run a shell command via shelljs and resolve with its stdout.
// Rejects with an Error carrying stderr on a non-zero exit code.
//
// Fix: the previous version wrapped the `return new Promise(...)` in a
// try/catch whose catch branch RETURNED `new Error('Oops')` — i.e. callers
// awaiting it would receive an Error object as a value instead of a
// rejection. A synchronous throw inside a Promise executor is already
// converted into a rejection by the Promise machinery, so the wrapper was
// both dead and misleading; it has been removed.
function execShellAsync (cmd, opts = {}) {
  return new Promise(function (resolve, reject) {
    // Suppress shelljs echoing command output to the parent process stdout.
    shell.config.silent = true
    shell.exec(cmd, opts, function (code, stdout, stderr) {
      if (code !== 0) return reject(new Error(stderr))
      return resolve(stdout)
    })
  })
}
|
||||
// Recursively delete a temporary work directory.
// Fix: also guard against falsy paths (undefined/null/'') — previously only
// '/' was excluded, so a missing workdir would still reach `shell.rm`.
// The '/' check remains the last line of defense against a catastrophic wipe.
function cleanupTmp (dir) {
  if (dir && dir !== '/') shell.rm('-fr', dir)
}
|
||||
|
||||
// Return true when the named image exists in the local Docker engine,
// false when the lookup fails (image not present).
async function checkImageAvailable (name) {
  try {
    await docker.engine.getImage(name).get()
    return true
  } catch (e) {
    // Cache image not found
    return false
  }
}
|
||||
|
||||
// AES-256-CBC encrypt a UTF-8 string with the module-level SECRETS key.
// Returns both the random IV and the ciphertext as hex strings so the pair
// can be stored together and fed back into decryptData.
function encryptData (text) {
  const iv = crypto.randomBytes(16)
  const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
  const encrypted = Buffer.concat([cipher.update(text), cipher.final()])
  return {
    iv: iv.toString('hex'),
    encryptedData: encrypted.toString('hex')
  }
}
|
||||
|
||||
// Inverse of encryptData: take { iv, encryptedData } hex strings and return
// the original plaintext, using the same module-level key and algorithm.
function decryptData (text) {
  const iv = Buffer.from(text.iv, 'hex')
  const encryptedText = Buffer.from(text.encryptedData, 'hex')
  const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
  const decrypted = Buffer.concat([decipher.update(encryptedText), decipher.final()])
  return decrypted.toString()
}
|
||||
|
||||
// Issue a signed session JWT for a user identified by `payload.uuid`.
// The token carries no custom claims; identity lives in jwtid/subject.
function createToken (payload) {
  const { uuid } = payload
  return jsonwebtoken.sign({}, process.env.JWT_SIGN_KEY, {
    // ~6 months, in seconds (jsonwebtoken treats numbers as seconds).
    expiresIn: 15778800,
    algorithm: 'HS256',
    audience: 'coolify',
    issuer: 'coolify',
    jwtid: uuid,
    subject: `User:${uuid}`,
    // Negative notBefore backdates validity — presumably to tolerate small
    // clock skew between issuer and verifier. TODO confirm intent.
    notBefore: -1000
  })
}
|
||||
|
||||
// Public surface of the common helpers module.
module.exports = {
  delay,
  createToken,
  execShellAsync,
  cleanupTmp,
  checkImageAvailable,
  encryptData,
  decryptData,
  verifyUserId
}
|
||||
@@ -1,28 +0,0 @@
|
||||
const Dockerode = require('dockerode')
|
||||
const { saveAppLog } = require('./logging')
|
||||
|
||||
// Shared Docker handle used across the app:
// - engine: Dockerode client bound to the socket path in DOCKER_ENGINE
// - network: name of the overlay network (DOCKER_NETWORK) services attach to
const docker = {
  engine: new Dockerode({
    socketPath: process.env.DOCKER_ENGINE
  }),
  network: process.env.DOCKER_NETWORK
}
|
||||
// Pump a Docker build/pull progress stream into the application log and
// resolve when the stream finishes (or reject on the first error).
//
// Fix: onFinished previously called resolve(res) even after reject(err)
// (missing early return). A settled promise ignores later calls so behavior
// was accidental rather than broken, but the control flow is now explicit.
async function streamEvents (stream, configuration) {
  await new Promise((resolve, reject) => {
    docker.engine.modem.followProgress(stream, onFinished, onProgress)
    function onFinished (err, res) {
      if (err) return reject(err)
      resolve(res)
    }
    function onProgress (event) {
      if (event.error) {
        // Log as an error line, then fail the whole stream.
        saveAppLog(event.error, configuration, true)
        reject(event.error)
      } else if (event.stream) {
        saveAppLog(event.stream, configuration)
      }
    }
  })
}
|
||||
|
||||
module.exports = { streamEvents, docker }
|
||||
@@ -1,75 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.handleErrors = exports.handleValidationError = exports.handleNotFoundError = void 0;
|
||||
const http_errors_enhanced_1 = require("http-errors-enhanced");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const utils_1 = require("./utils");
|
||||
const validation_1 = require("./validation");
|
||||
// Fastify not-found handler: funnel 404s through the shared error pipeline
// so they get the same JSON error shape as every other error.
function handleNotFoundError(request, reply) {
    handleErrors(new http_errors_enhanced_1.NotFoundError('Not found.'), request, reply);
}
exports.handleNotFoundError = handleNotFoundError;
|
||||
// Convert a Fastify schema-validation error into a 400 BadRequestError whose
// `failedValidations` payload maps each failing field to a readable message.
function handleValidationError(error, request) {
    /*
      As seen in https://github.com/fastify/fastify/blob/master/lib/validation.js
      the error.message will always start with the relative section (params, querystring, headers, body)
      and fastify throws on first failing section.
    */
    const section = error.message.match(/^\w+/)[0];
    return new http_errors_enhanced_1.BadRequestError('One or more validations failed trying to process your request.', {
        failedValidations: validation_1.convertValidationErrors(section, Reflect.get(request, section), error.validation)
    });
}
exports.handleValidationError = handleValidationError;
|
||||
// Central Fastify error handler (compiled from TypeScript).
// Normalizes ANY thrown value into an HttpError, then replies with a JSON
// body of { statusCode, error, message, ...extra properties }.
function handleErrors(error, request, reply) {
    var _a, _b;
    // It is a generic error, handle it
    const code = error.code;
    if (!('statusCode' in error)) {
        if ('validation' in error && ((_a = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _a === void 0 ? void 0 : _a.convertValidationErrors)) {
            // If it is a validation error, convert errors to human friendly format
            error = handleValidationError(error, request);
        }
        else if ((_b = request[interfaces_1.kHttpErrorsEnhancedConfiguration]) === null || _b === void 0 ? void 0 : _b.hideUnhandledErrors) {
            // It is requested to hide the error, just log it and then create a generic one
            request.log.error({ error: http_errors_enhanced_1.serializeError(error) });
            error = new http_errors_enhanced_1.InternalServerError('An error occurred trying to process your request.');
        }
        else {
            // Wrap in a HttpError, making the stack explicitily available
            error = new http_errors_enhanced_1.InternalServerError(http_errors_enhanced_1.serializeError(error));
            Object.defineProperty(error, 'stack', { enumerable: true });
        }
    }
    else if (code === 'INVALID_CONTENT_TYPE' || code === 'FST_ERR_CTP_INVALID_MEDIA_TYPE') {
        // Non-JSON content type → 415
        error = new http_errors_enhanced_1.UnsupportedMediaTypeError(utils_1.upperFirst(validation_1.validationMessagesFormatters.contentType()));
    }
    else if (code === 'FST_ERR_CTP_EMPTY_JSON_BODY') {
        error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.jsonEmpty()));
    }
    else if (code === 'MALFORMED_JSON' || error.message === 'Invalid JSON' || error.stack.includes('at JSON.parse')) {
        error = new http_errors_enhanced_1.BadRequestError(utils_1.upperFirst(validation_1.validationMessagesFormatters.json()));
    }
    // Get the status code
    let { statusCode, headers } = error;
    // Code outside HTTP range
    if (statusCode < 100 || statusCode > 599) {
        statusCode = http_errors_enhanced_1.INTERNAL_SERVER_ERROR;
    }
    // Create the body
    const body = {
        statusCode,
        error: http_errors_enhanced_1.messagesByCodes[statusCode],
        message: error.message
    };
    // Copy any extra enumerable properties of the error onto the body.
    http_errors_enhanced_1.addAdditionalProperties(body, error);
    // Send the error back
    // eslint-disable-next-line @typescript-eslint/no-floating-promises
    reply
        .code(statusCode)
        .headers(headers !== null && headers !== void 0 ? headers : {})
        .type('application/json')
        .send(body);
}
exports.handleErrors = handleErrors;
|
||||
@@ -1,58 +0,0 @@
|
||||
/* eslint-disable */
// TypeScript-compiler-emitted CommonJS interop helpers and re-exports for the
// vendored fastify-http-errors-enhanced package entry point. Generated code:
// do not hand-edit the helper shims below.
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.plugin = exports.validationMessagesFormatters = exports.niceJoin = exports.convertValidationErrors = void 0;
const fastify_plugin_1 = __importDefault(require("fastify-plugin"));
const handlers_1 = require("./handlers");
const interfaces_1 = require("./interfaces");
const validation_1 = require("./validation");
// Re-export everything from the sibling modules at the package root.
__exportStar(require("./handlers"), exports);
__exportStar(require("./interfaces"), exports);
var validation_2 = require("./validation");
Object.defineProperty(exports, "convertValidationErrors", { enumerable: true, get: function () { return validation_2.convertValidationErrors; } });
Object.defineProperty(exports, "niceJoin", { enumerable: true, get: function () { return validation_2.niceJoin; } });
Object.defineProperty(exports, "validationMessagesFormatters", { enumerable: true, get: function () { return validation_2.validationMessagesFormatters; } });
|
||||
// Fastify plugin registration for the enhanced error handling:
// - stores per-request configuration under a Symbol key,
// - installs handleErrors as the global error handler,
// - optionally wires response-schema validation (off in production by default).
exports.plugin = fastify_plugin_1.default(function (instance, options, done) {
    var _a, _b, _c, _d;
    const isProduction = process.env.NODE_ENV === 'production';
    // Response validation defaults to enabled outside production.
    const convertResponsesValidationErrors = (_a = options.convertResponsesValidationErrors) !== null && _a !== void 0 ? _a : !isProduction;
    const configuration = {
        // Hide raw unhandled errors from clients in production by default.
        hideUnhandledErrors: (_b = options.hideUnhandledErrors) !== null && _b !== void 0 ? _b : isProduction,
        convertValidationErrors: (_c = options.convertValidationErrors) !== null && _c !== void 0 ? _c : true,
        responseValidatorCustomizer: options.responseValidatorCustomizer,
        allowUndeclaredResponses: (_d = options.allowUndeclaredResponses) !== null && _d !== void 0 ? _d : false
    };
    instance.decorate(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
    instance.decorateRequest(interfaces_1.kHttpErrorsEnhancedConfiguration, null);
    // Expose the configuration on every request for the handlers module.
    instance.addHook('onRequest', async (request) => {
        request[interfaces_1.kHttpErrorsEnhancedConfiguration] = configuration;
    });
    instance.setErrorHandler(handlers_1.handleErrors);
    // instance.setNotFoundHandler(handlers_1.handleNotFoundError);
    if (convertResponsesValidationErrors) {
        // Collect response schemas per route; compile them all once on 'onReady'.
        instance.decorate(interfaces_1.kHttpErrorsEnhancedResponseValidations, []);
        instance.addHook('onRoute', validation_1.addResponseValidation);
        instance.addHook('onReady', validation_1.compileResponseValidationSchema.bind(instance, configuration));
    }
    done();
}, { name: 'fastify-http-errors-enhanced' });
exports.default = exports.plugin;
// Fix CommonJS exporting
/* istanbul ignore else */
if (typeof module !== 'undefined') {
    module.exports = exports.plugin;
    Object.assign(module.exports, exports);
}
|
||||
@@ -1,6 +0,0 @@
|
||||
/* eslint-disable */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.kHttpErrorsEnhancedResponseValidations = exports.kHttpErrorsEnhancedConfiguration = void 0;
// Symbol keys used to stash plugin state on the Fastify instance/request
// without risking collisions with user-defined properties.
exports.kHttpErrorsEnhancedConfiguration = Symbol('fastify-http-errors-enhanced-configuration');
exports.kHttpErrorsEnhancedResponseValidations = Symbol('fastify-http-errors-enhanced-response-validation');
|
||||
@@ -1,31 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.get = exports.upperFirst = void 0;
|
||||
// Capitalize the first character of a string.
// Non-string inputs and the empty string are returned unchanged.
function upperFirst(source) {
    const isNonEmptyString = typeof source === 'string' && source.length > 0;
    return isNonEmptyString ? source.charAt(0).toUpperCase() + source.slice(1) : source;
}
exports.upperFirst = upperFirst;
|
||||
// Safe deep property lookup by dotted path, e.g. get(obj, 'a.b.1') or
// get(obj, 'a.[1]'). Numeric tokens (bare or bracketed) index arrays.
// Returns undefined as soon as the chain hits null/undefined.
function get(target, path) {
    const tokens = path.split('.').map((token) => token.trim());
    let current = target;
    for (const token of tokens) {
        if (current === undefined || current === null) {
            // Chain broken before consuming every token.
            return undefined;
        }
        // Matches a leading integer ("1") or a bracketed one ("[1]").
        const indexMatch = token.match(/^(\d+)|(?:\[(\d+)\])$/);
        if (indexMatch) {
            const rawIndex = indexMatch[1] !== undefined && indexMatch[1] !== null ? indexMatch[1] : indexMatch[2];
            current = current[parseInt(rawIndex, 10)];
        } else {
            current = current[token];
        }
    }
    return current;
}
exports.get = get;
|
||||
@@ -1,239 +0,0 @@
|
||||
/* eslint-disable */
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.compileResponseValidationSchema = exports.addResponseValidation = exports.convertValidationErrors = exports.validationMessagesFormatters = exports.niceJoin = void 0;
|
||||
const ajv_1 = __importDefault(require("ajv"));
|
||||
const http_errors_enhanced_1 = require("http-errors-enhanced");
|
||||
const interfaces_1 = require("./interfaces");
|
||||
const utils_1 = require("./utils");
|
||||
// Join an array into an English-style list: the final pair is joined with
// `lastSeparator` (default ' and '), earlier items with `separator`.
// niceJoin(['a','b','c']) === 'a, b and c'.
function niceJoin(array, lastSeparator = ' and ', separator = ', ') {
    if (array.length === 0) {
        return '';
    }
    if (array.length === 1) {
        return array[0];
    }
    if (array.length === 2) {
        return array.join(lastSeparator);
    }
    const head = array.slice(0, -1).join(separator);
    return head + lastSeparator + array[array.length - 1];
}
exports.niceJoin = niceJoin;
|
||||
// Human-readable message formatters keyed by ajv keyword / format name.
// Each entry is either a fixed string factory or a function of the ajv
// parameter (limit, allowed values, pattern, ...). The exact wording is part
// of the public API surface — do not edit strings casually.
exports.validationMessagesFormatters = {
    contentType: () => 'only JSON payloads are accepted. Please set the "Content-Type" header to start with "application/json"',
    json: () => 'the body payload is not a valid JSON',
    jsonEmpty: () => 'the JSON body payload cannot be empty if the "Content-Type" header is set',
    missing: () => 'must be present',
    unknown: () => 'is not a valid property',
    uuid: () => 'must be a valid GUID (UUID v4)',
    timestamp: () => 'must be a valid ISO 8601 / RFC 3339 timestamp (example: 2018-07-06T12:34:56Z)',
    date: () => 'must be a valid ISO 8601 / RFC 3339 date (example: 2018-07-06)',
    time: () => 'must be a valid ISO 8601 / RFC 3339 time (example: 12:34:56)',
    uri: () => 'must be a valid URI',
    hostname: () => 'must be a valid hostname',
    ipv4: () => 'must be a valid IPv4',
    ipv6: () => 'must be a valid IPv6',
    // Maps a JSON-schema type name to a readable requirement.
    paramType: (type) => {
        switch (type) {
            case 'integer':
                return 'must be a valid integer number';
            case 'number':
                return 'must be a valid number';
            case 'boolean':
                return 'must be a valid boolean (true or false)';
            case 'object':
                return 'must be a object';
            case 'array':
                return 'must be an array';
            default:
                return 'must be a string';
        }
    },
    presentString: () => 'must be a non empty string',
    minimum: (min) => `must be a number greater than or equal to ${min}`,
    maximum: (max) => `must be a number less than or equal to ${max}`,
    minimumProperties(min) {
        return min === 1 ? 'cannot be a empty object' : `must be a object with at least ${min} properties`;
    },
    maximumProperties(max) {
        return max === 0 ? 'must be a empty object' : `must be a object with at most ${max} properties`;
    },
    minimumItems(min) {
        return min === 1 ? 'cannot be a empty array' : `must be an array with at least ${min} items`;
    },
    maximumItems(max) {
        return max === 0 ? 'must be a empty array' : `must be an array with at most ${max} items`;
    },
    enum: (values) => `must be one of the following values: ${niceJoin(values.map((f) => `"${f}"`), ' or ')}`,
    // Strips non-capturing-group markers so the pattern reads more naturally.
    pattern: (pattern) => `must match pattern "${pattern.replace(/\(\?:/g, '(')}"`,
    invalidResponseCode: (code) => `This endpoint cannot respond with HTTP status ${code}.`,
    invalidResponse: (code) => `The response returned from the endpoint violates its specification for the HTTP status ${code}.`,
    invalidFormat: (format) => `must match format "${format}" (format)`
};
|
||||
// Translate an ajv validation-error array into a flat map of
// { fieldKey: humanMessage }, nested under the request section it came from
// ('body', 'query', 'params', 'headers' — 'querystring' is normalized to
// 'query'). Keys are cleaned of ajv dataPath artifacts (leading '.', quotes,
// brackets, leading '/').
function convertValidationErrors(section, data, validationErrors) {
    const errors = {};
    if (section === 'querystring') {
        section = 'query';
    }
    // For each error
    for (const e of validationErrors) {
        let message = '';
        let pattern;
        let value;
        let reason;
        // Normalize the key
        let key = e.dataPath;
        if (key.startsWith('.')) {
            key = key.substring(1);
        }
        // Remove useless quotes
        /* istanbul ignore next */
        if (key.startsWith('[') && key.endsWith(']')) {
            key = key.substring(1, key.length - 1);
        }
        // Depending on the type
        switch (e.keyword) {
            case 'required':
            case 'dependencies':
                // The failing key is the missing property, not the dataPath.
                key = e.params.missingProperty;
                message = exports.validationMessagesFormatters.missing();
                break;
            case 'additionalProperties':
                key = e.params.additionalProperty;
                message = exports.validationMessagesFormatters.unknown();
                break;
            case 'type':
                message = exports.validationMessagesFormatters.paramType(e.params.type);
                break;
            case 'minProperties':
                message = exports.validationMessagesFormatters.minimumProperties(e.params.limit);
                break;
            case 'maxProperties':
                message = exports.validationMessagesFormatters.maximumProperties(e.params.limit);
                break;
            case 'minItems':
                message = exports.validationMessagesFormatters.minimumItems(e.params.limit);
                break;
            case 'maxItems':
                message = exports.validationMessagesFormatters.maximumItems(e.params.limit);
                break;
            case 'minimum':
                message = exports.validationMessagesFormatters.minimum(e.params.limit);
                break;
            case 'maximum':
                message = exports.validationMessagesFormatters.maximum(e.params.limit);
                break;
            case 'enum':
                message = exports.validationMessagesFormatters.enum(e.params.allowedValues);
                break;
            case 'pattern':
                pattern = e.params.pattern;
                value = utils_1.get(data, key);
                // A '.+' pattern on an empty value really means "must be present".
                if (pattern === '.+' && !value) {
                    message = exports.validationMessagesFormatters.presentString();
                }
                else {
                    message = exports.validationMessagesFormatters.pattern(e.params.pattern);
                }
                break;
            case 'format':
                reason = e.params.format;
                // Normalize the key
                if (reason === 'date-time') {
                    reason = 'timestamp';
                }
                message = (exports.validationMessagesFormatters[reason] || exports.validationMessagesFormatters.invalidFormat)(reason);
                break;
        }
        // No custom message was found, default to input one replacing the starting verb and adding some path info
        if (!message.length) {
            message = `${e.message.replace(/^should/, 'must')} (${e.keyword})`;
        }
        // Remove useless quotes
        /* istanbul ignore next */
        if (key.match(/(?:^['"])(?:[^.]+)(?:['"]$)/)) {
            key = key.substring(1, key.length - 1);
        }
        // Fix empty properties
        if (!key) {
            key = '$root';
        }
        key = key.replace(/^\//, '');
        errors[key] = message;
    }
    return { [section]: errors };
}
exports.convertValidationErrors = convertValidationErrors;
|
||||
// Fastify 'onRoute' hook (bound to the instance): for each route that
// declares response schemas, register the schemas for later compilation and
// install a preSerialization hook that validates outgoing payloads against
// the schema for the actual status code.
function addResponseValidation(route) {
    var _a;
    if (!((_a = route.schema) === null || _a === void 0 ? void 0 : _a.response)) {
        return;
    }
    const validators = {};
    /*
      Add these validators to the list of the one to compile once the server is started.
      This makes possible to handle shared schemas.
    */
    this[interfaces_1.kHttpErrorsEnhancedResponseValidations].push([
        this,
        validators,
        Object.entries(route.schema.response)
    ]);
    // Note that this hook is not called for non JSON payloads therefore validation is not possible in such cases
    route.preSerialization = async function (request, reply, payload) {
        const statusCode = reply.raw.statusCode;
        // Never validate error 500
        if (statusCode === http_errors_enhanced_1.INTERNAL_SERVER_ERROR) {
            return payload;
        }
        // No validator, it means the HTTP status is not allowed
        const validator = validators[statusCode];
        if (!validator) {
            if (request[interfaces_1.kHttpErrorsEnhancedConfiguration].allowUndeclaredResponses) {
                return payload;
            }
            throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponseCode(statusCode));
        }
        // Now validate the payload
        const valid = validator(payload);
        if (!valid) {
            throw new http_errors_enhanced_1.InternalServerError(exports.validationMessagesFormatters.invalidResponse(statusCode), {
                failedValidations: convertValidationErrors('response', payload, validator.errors)
            });
        }
        return payload;
    };
}
exports.addResponseValidation = addResponseValidation;
|
||||
// Fastify 'onReady' hook (bound to the instance): compile every response
// schema collected by addResponseValidation into ajv validator functions,
// filling each route's `validators` map keyed by HTTP status code.
function compileResponseValidationSchema(configuration) {
    // Fix CJS/ESM interoperability
    // @ts-expect-error
    let AjvConstructor = ajv_1.default;
    /* istanbul ignore next */
    if (AjvConstructor.default) {
        AjvConstructor = AjvConstructor.default;
    }
    const hasCustomizer = typeof configuration.responseValidatorCustomizer === 'function';
    for (const [instance, validators, schemas] of this[interfaces_1.kHttpErrorsEnhancedResponseValidations]) {
        // @ts-expect-error
        const compiler = new AjvConstructor({
            // The fastify defaults, with the exception of removeAdditional and coerceTypes, which have been reversed
            removeAdditional: false,
            useDefaults: true,
            coerceTypes: false,
            allErrors: true
        });
        // Make the instance's shared schemas resolvable from response schemas.
        compiler.addSchema(Object.values(instance.getSchemas()));
        // 'example' is an OpenAPI-ism ajv doesn't know; register it as a no-op.
        compiler.addKeyword('example');
        if (hasCustomizer) {
            configuration.responseValidatorCustomizer(compiler);
        }
        for (const [code, schema] of schemas) {
            validators[code] = compiler.compile(schema);
        }
    }
}
exports.compileResponseValidationSchema = compileResponseValidationSchema;
|
||||
@@ -1,48 +0,0 @@
|
||||
const dayjs = require('dayjs')
|
||||
const axios = require('axios')
|
||||
|
||||
const ApplicationLog = require('../models/Logs/Application')
|
||||
const ServerLog = require('../models/Logs/Server')
|
||||
const Settings = require('../models/Settings')
|
||||
const { version } = require('../../package.json')
|
||||
|
||||
// Millisecond-precision timestamp prefix for log lines (trailing space included).
function generateTimestamp () {
  return dayjs().format('YYYY-MM-DD HH:mm:ss.SSS') + ' '
}
|
||||
// Regex source matching ANSI/VT100 escape sequences (colors, cursor control).
// The two alternatives are joined with '|' and compiled with the 'g' flag in
// saveAppLog to strip terminal control codes from captured build output.
const patterns = [
  '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
  '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
].join('|')
|
||||
|
||||
// Persist one application log line for the current deployment.
// - event: raw text (may contain ANSI escapes and newlines — both stripped)
// - isError: prefixes '[ERROR 😱]' instead of '[INFO]'
// Empty/newline-only info events are skipped. Errors while saving are logged
// to the console and returned (never thrown) so logging can't break a deploy.
async function saveAppLog (event, configuration, isError) {
  try {
    const deployId = configuration.general.deployId
    const repoId = configuration.repository.id
    const branch = configuration.repository.branch
    if (isError) {
      // Strip ANSI escape sequences and all newlines before persisting.
      const clearedEvent = '[ERROR 😱] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
      await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
    } else {
      if (event && event !== '\n') {
        const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(new RegExp(patterns, 'g'), '').replace(/(\r\n|\n|\r)/gm, '')
        await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
      }
    }
  } catch (error) {
    console.log(error)
    return error
  }
}
|
||||
|
||||
// Persist a server-side error (deduplicated by exact payload match) and,
// when the instance opted in via settings.sendErrors and we are running in
// production, forward it to the central coollabs error collector.
async function saveServerLog (error) {
  const settings = await Settings.findOne({ applicationName: 'coolify' })
  const payload = { message: error.message, stack: error.stack, type: error.type || 'spaghetticode', version }

  // Skip saving if an identical log entry already exists or there is no message.
  const found = await ServerLog.find(payload)
  if (found.length === 0 && error.message) await new ServerLog(payload).save()
  if (settings && settings.sendErrors && process.env.NODE_ENV === 'production') await axios.post('https://errors.coollabs.io/api/error', payload)
}
// Public logging API.
module.exports = {
  saveAppLog,
  saveServerLog
}
|
||||
@@ -1,16 +0,0 @@
|
||||
const mongoose = require('mongoose')
|
||||
// One row per deployment attempt; `progress` tracks the lifecycle
// ('queued' → 'done', updated by queueAndBuild).
const deploymentSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    nickname: { type: String, required: true },
    repoId: { type: Number, required: true },
    organization: { type: String, required: true },
    name: { type: String, required: true },
    branch: { type: String, required: true },
    domain: { type: String, required: true },
    // Fix: was `require: true` — a typo Mongoose silently ignores (the option
    // is `required`). Harmless to enforce now because a default is supplied.
    progress: { type: String, required: true, default: 'queued' }
  },
  { timestamps: true }
)

module.exports = mongoose.model('deployment', deploymentSchema)
|
||||
@@ -1,10 +0,0 @@
|
||||
const mongoose = require('mongoose')
|
||||
// Per-deployment application log lines (see saveAppLog in libs/logging).
const logSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    // Fix: saveAppLog passes repoId and branch with every entry, but they
    // were missing from the schema, so Mongoose (strict mode by default)
    // silently dropped them. Added as optional fields so existing documents
    // without them remain valid.
    repoId: { type: Number },
    branch: { type: String },
    event: { type: String, required: true }
  },
  // Log entries are append-only: record createdAt, skip updatedAt.
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-application', logSchema)
|
||||
@@ -1,14 +0,0 @@
|
||||
const mongoose = require('mongoose')
|
||||
const { version } = require('../../../package.json')
|
||||
// Server-side error log entries (see saveServerLog in libs/logging).
const logSchema = mongoose.Schema(
  {
    // Coolify version that produced the error; defaults to the running build.
    version: { type: String, default: version },
    type: { type: String, required: true },
    message: { type: String, required: true },
    stack: { type: String },
    // Whether an admin has acknowledged this error in the UI — TODO confirm.
    seen: { type: Boolean, default: false }
  },
  // Append-only: record createdAt, skip updatedAt.
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-server', logSchema)
|
||||
@@ -1,12 +0,0 @@
|
||||
const mongoose = require('mongoose')
|
||||
|
||||
// Singleton instance-level settings (looked up by applicationName 'coolify').
const settingsSchema = mongoose.Schema(
  {
    applicationName: { type: String, required: true, default: 'coolify' },
    // Whether new users may self-register.
    allowRegistration: { type: Boolean, required: true, default: false },
    // Opt-in/out of forwarding server errors to the central collector.
    sendErrors: { type: Boolean, required: true, default: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('settings', settingsSchema)
|
||||
@@ -1,12 +0,0 @@
|
||||
const mongoose = require('mongoose')
|
||||
|
||||
// Registered user; `uid` is the stable identifier carried in JWT `jti`.
const userSchema = mongoose.Schema(
  {
    email: { type: String, required: true },
    avatar: { type: String },
    uid: { type: String, required: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('user', userSchema)
|
||||
@@ -1,21 +0,0 @@
|
||||
const fp = require('fastify-plugin')
|
||||
const User = require('../models/User')
|
||||
// Fastify auth plugin: registers fastify-jwt with the instance signing key
// and rejects every request whose JWT is invalid or whose `jti` claim does
// not match a known user.
module.exports = fp(async function (fastify, options, next) {
  fastify.register(require('fastify-jwt'), {
    secret: fastify.config.JWT_SIGN_KEY
  })
  fastify.addHook('onRequest', async (request, reply) => {
    try {
      const { jti } = await request.jwtVerify()
      const found = await User.findOne({ uid: jti })
      if (found) {
        // NOTE(review): the return value of an onRequest hook is ignored by
        // Fastify; returning true here just lets the request proceed.
        return true
      } else {
        reply.code(401).send('Unauthorized')
      }
    } catch (err) {
      // jwtVerify throws on missing/invalid/expired tokens.
      reply.code(401).send('Unauthorized')
    }
  })
  next()
})
|
||||
@@ -1,37 +0,0 @@
|
||||
|
||||
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
try {
|
||||
const configuration = setDefaultConfiguration(request.body)
|
||||
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
let foundDomain = false
|
||||
|
||||
for (const service of services) {
|
||||
const running = JSON.parse(service.Spec.Labels.configuration)
|
||||
if (running) {
|
||||
if (
|
||||
running.publish.domain === configuration.publish.domain &&
|
||||
running.repository.id !== configuration.repository.id &&
|
||||
running.publish.path === configuration.publish.path
|
||||
) {
|
||||
foundDomain = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fastify.config.DOMAIN === configuration.publish.domain) foundDomain = true
|
||||
if (foundDomain) {
|
||||
reply.code(500).send({ message: 'Domain already in use.' })
|
||||
return
|
||||
}
|
||||
return { message: 'OK' }
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,66 +0,0 @@
|
||||
|
||||
const Deployment = require('../../../../models/Deployment')
|
||||
const ApplicationLog = require('../../../../models/Logs/Application')
|
||||
const { verifyUserId, cleanupTmp } = require('../../../../libs/common')
|
||||
const { queueAndBuild } = require('../../../../libs/applications')
|
||||
const { setDefaultConfiguration, precheckDeployment } = require('../../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../../libs/logging')
|
||||
const cloneRepository = require('../../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
let configuration
|
||||
try {
|
||||
await verifyUserId(request.headers.authorization)
|
||||
} catch (error) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
try {
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
configuration = setDefaultConfiguration(request.body)
|
||||
if (!configuration) {
|
||||
throw new Error('Whaat?')
|
||||
}
|
||||
await cloneRepository(configuration)
|
||||
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
|
||||
|
||||
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
|
||||
return
|
||||
}
|
||||
|
||||
const alreadyQueued = await Deployment.find({
|
||||
repoId: configuration.repository.id,
|
||||
branch: configuration.repository.branch,
|
||||
organization: configuration.repository.organization,
|
||||
name: configuration.repository.name,
|
||||
domain: configuration.publish.domain,
|
||||
progress: { $in: ['queued', 'inprogress'] }
|
||||
})
|
||||
|
||||
if (alreadyQueued.length > 0) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name, deployId: configuration.general.deployId })
|
||||
await queueAndBuild(configuration, imageChanged)
|
||||
} catch (error) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId } = configuration.general
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
if (error.name) {
|
||||
if (error.message && error.stack) await saveServerLog(error)
|
||||
if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
}
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,66 +0,0 @@
|
||||
const ApplicationLog = require('../../../../models/Logs/Application')
|
||||
const Deployment = require('../../../../models/Deployment')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const relativeTime = require('dayjs/plugin/relativeTime')
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(relativeTime)
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const getLogSchema = {
|
||||
querystring: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
repoId: { type: 'string' },
|
||||
branch: { type: 'string' }
|
||||
},
|
||||
required: ['repoId', 'branch']
|
||||
}
|
||||
}
|
||||
fastify.get('/', { schema: getLogSchema }, async (request, reply) => {
|
||||
try {
|
||||
const { repoId, branch, page } = request.query
|
||||
const onePage = 5
|
||||
const show = Number(page) * onePage || 5
|
||||
const deploy = await Deployment.find({ repoId, branch })
|
||||
.select('-_id -__v -repoId')
|
||||
.sort({ createdAt: 'desc' })
|
||||
.limit(show)
|
||||
|
||||
const finalLogs = deploy.map(d => {
|
||||
const finalLogs = { ...d._doc }
|
||||
|
||||
const updatedAt = dayjs(d.updatedAt).utc()
|
||||
|
||||
finalLogs.took = updatedAt.diff(dayjs(d.createdAt)) / 1000
|
||||
finalLogs.since = updatedAt.fromNow()
|
||||
|
||||
return finalLogs
|
||||
})
|
||||
return finalLogs
|
||||
} catch (error) {
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
fastify.get('/:deployId', async (request, reply) => {
|
||||
const { deployId } = request.params
|
||||
try {
|
||||
const logs = await ApplicationLog.find({ deployId })
|
||||
.select('-_id -__v')
|
||||
.sort({ createdAt: 'asc' })
|
||||
|
||||
const deploy = await Deployment.findOne({ deployId })
|
||||
.select('-_id -__v')
|
||||
.sort({ createdAt: 'desc' })
|
||||
|
||||
const finalLogs = {}
|
||||
finalLogs.progress = deploy.progress
|
||||
finalLogs.events = logs.map(log => log.event)
|
||||
finalLogs.human = dayjs(deploy.updatedAt).from(dayjs(deploy.updatedAt))
|
||||
return finalLogs
|
||||
} catch (e) {
|
||||
throw new Error('No logs found')
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
const { name } = request.query
|
||||
const service = await docker.engine.getService(`${name}_${name}`)
|
||||
const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
|
||||
return { logs }
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const ApplicationLog = require('../../../models/Logs/Application')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
const { organization, name, branch } = request.body
|
||||
let found = false
|
||||
try {
|
||||
(await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(s => {
|
||||
const running = JSON.parse(s.Spec.Labels.configuration)
|
||||
if (running.repository.organization === organization &&
|
||||
running.repository.name === name &&
|
||||
running.repository.branch === branch) {
|
||||
found = running
|
||||
}
|
||||
return null
|
||||
})
|
||||
if (found) {
|
||||
const deploys = await Deployment.find({ organization, branch, name })
|
||||
for (const deploy of deploys) {
|
||||
await ApplicationLog.deleteMany({ deployId: deploy.deployId })
|
||||
await Deployment.deleteMany({ deployId: deploy.deployId })
|
||||
}
|
||||
await execShellAsync(`docker stack rm ${found.build.container.name}`)
|
||||
reply.code(200).send({ organization, name, branch })
|
||||
} else {
|
||||
reply.code(500).send({ message: 'Nothing to do.' })
|
||||
}
|
||||
} catch (error) {
|
||||
reply.code(500).send({ message: 'Nothing to do.' })
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,28 +0,0 @@
|
||||
const { docker } = require('../../libs/docker')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
const { name, organization, branch } = request.body
|
||||
const services = await docker.engine.listServices()
|
||||
const applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
const found = applications.find(r => {
|
||||
const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
|
||||
if (branch) {
|
||||
if (configuration.repository.name === name && configuration.repository.organization === organization && configuration.repository.branch === branch) {
|
||||
return r
|
||||
}
|
||||
} else {
|
||||
if (configuration.repository.name === name && configuration.repository.organization === organization) {
|
||||
return r
|
||||
}
|
||||
}
|
||||
return null
|
||||
})
|
||||
if (found) {
|
||||
return JSON.parse(found.Spec.Labels.configuration)
|
||||
} else {
|
||||
reply.code(500).send({ message: 'No configuration found.' })
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const ServerLog = require('../../../models/Logs/Server')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
const latestDeployments = await Deployment.aggregate([
|
||||
{
|
||||
$sort: { createdAt: -1 }
|
||||
},
|
||||
{
|
||||
$group:
|
||||
{
|
||||
_id: {
|
||||
repoId: '$repoId',
|
||||
branch: '$branch'
|
||||
},
|
||||
createdAt: { $last: '$createdAt' },
|
||||
progress: { $first: '$progress' }
|
||||
}
|
||||
}
|
||||
])
|
||||
const serverLogs = await ServerLog.find()
|
||||
const services = await docker.engine.listServices()
|
||||
let applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application' && r.Spec.Labels.configuration)
|
||||
let databases = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && r.Spec.Labels.configuration)
|
||||
applications = applications.map(r => {
|
||||
if (JSON.parse(r.Spec.Labels.configuration)) {
|
||||
const configuration = JSON.parse(r.Spec.Labels.configuration)
|
||||
const status = latestDeployments.find(l => configuration.repository.id === l._id.repoId && configuration.repository.branch === l._id.branch)
|
||||
if (status && status.progress) r.progress = status.progress
|
||||
r.Spec.Labels.configuration = configuration
|
||||
return r
|
||||
}
|
||||
return {}
|
||||
})
|
||||
databases = databases.map(r => {
|
||||
const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
|
||||
r.Spec.Labels.configuration = configuration
|
||||
return r
|
||||
})
|
||||
applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain + item.Spec.Labels.configuration.publish.path, item])).values()]
|
||||
return {
|
||||
serverLogs,
|
||||
applications: {
|
||||
deployed: applications
|
||||
},
|
||||
databases: {
|
||||
deployed: databases
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT' && error.errno === -2) {
|
||||
throw new Error(`Docker service unavailable at ${error.address}.`)
|
||||
} else {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,179 +0,0 @@
|
||||
const yaml = require('js-yaml')
|
||||
const fs = require('fs').promises
|
||||
const cuid = require('cuid')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const generator = require('generate-password')
|
||||
|
||||
function getUniq () {
|
||||
return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
|
||||
}
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/:deployId', async (request, reply) => {
|
||||
const { deployId } = request.params
|
||||
try {
|
||||
const database = (await docker.engine.listServices()).find(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && JSON.parse(r.Spec.Labels.configuration).general.deployId === deployId)
|
||||
if (database) {
|
||||
const jsonEnvs = {}
|
||||
for (const d of database.Spec.TaskTemplate.ContainerSpec.Env) {
|
||||
const s = d.split('=')
|
||||
jsonEnvs[s[0]] = s[1]
|
||||
}
|
||||
const payload = {
|
||||
config: JSON.parse(database.Spec.Labels.configuration),
|
||||
envs: jsonEnvs
|
||||
}
|
||||
reply.code(200).send(payload)
|
||||
} else {
|
||||
throw new Error()
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error('No database found?')
|
||||
}
|
||||
})
|
||||
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: { type: 'string', enum: ['mongodb', 'postgresql', 'mysql', 'couchdb'] }
|
||||
},
|
||||
required: ['type']
|
||||
}
|
||||
}
|
||||
|
||||
fastify.post('/deploy', { schema: postSchema }, async (request, reply) => {
|
||||
try {
|
||||
let { type, defaultDatabaseName } = request.body
|
||||
const passwords = generator.generateMultiple(2, {
|
||||
length: 24,
|
||||
numbers: true,
|
||||
strict: true
|
||||
})
|
||||
const usernames = generator.generateMultiple(2, {
|
||||
length: 10,
|
||||
numbers: true,
|
||||
strict: true
|
||||
})
|
||||
// TODO: Query for existing db with the same name
|
||||
const nickname = getUniq()
|
||||
|
||||
if (!defaultDatabaseName) defaultDatabaseName = nickname
|
||||
|
||||
reply.code(201).send({ message: 'Deploying.' })
|
||||
// TODO: Persistent volume, custom inputs
|
||||
const deployId = cuid()
|
||||
const configuration = {
|
||||
general: {
|
||||
workdir: `/tmp/${deployId}`,
|
||||
deployId,
|
||||
nickname,
|
||||
type
|
||||
},
|
||||
database: {
|
||||
usernames,
|
||||
passwords,
|
||||
defaultDatabaseName
|
||||
},
|
||||
deploy: {
|
||||
name: nickname
|
||||
}
|
||||
}
|
||||
let generateEnvs = {}
|
||||
let image = null
|
||||
let volume = null
|
||||
if (type === 'mongodb') {
|
||||
generateEnvs = {
|
||||
MONGODB_ROOT_PASSWORD: passwords[0],
|
||||
MONGODB_USERNAME: usernames[0],
|
||||
MONGODB_PASSWORD: passwords[1],
|
||||
MONGODB_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/mongodb:4.4'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`
|
||||
} else if (type === 'postgresql') {
|
||||
generateEnvs = {
|
||||
POSTGRESQL_PASSWORD: passwords[0],
|
||||
POSTGRESQL_USERNAME: usernames[0],
|
||||
POSTGRESQL_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/postgresql:13.2.0'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`
|
||||
} else if (type === 'couchdb') {
|
||||
generateEnvs = {
|
||||
COUCHDB_PASSWORD: passwords[0],
|
||||
COUCHDB_USER: usernames[0]
|
||||
}
|
||||
image = 'bitnami/couchdb:3'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`
|
||||
} else if (type === 'mysql') {
|
||||
generateEnvs = {
|
||||
MYSQL_ROOT_PASSWORD: passwords[0],
|
||||
MYSQL_ROOT_USER: usernames[0],
|
||||
MYSQL_USER: usernames[1],
|
||||
MYSQL_PASSWORD: passwords[1],
|
||||
MYSQL_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/mysql:8.0'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`
|
||||
}
|
||||
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[configuration.general.deployId]: {
|
||||
image,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
volumes: [volume],
|
||||
deploy: {
|
||||
replicas: 1,
|
||||
update_config: {
|
||||
parallelism: 0,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 0,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=database',
|
||||
'configuration=' + JSON.stringify(configuration)
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: {
|
||||
[`${configuration.general.deployId}-${type}-data`]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
}
|
||||
await execShellAsync(`mkdir -p ${configuration.general.workdir}`)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
|
||||
)
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
fastify.delete('/:dbName', async (request, reply) => {
|
||||
const { dbName } = request.params
|
||||
await execShellAsync(`docker stack rm ${dbName}`)
|
||||
reply.code(200).send({})
|
||||
})
|
||||
}
|
||||
@@ -1,127 +0,0 @@
|
||||
const axios = require('axios')
|
||||
const User = require('../../../models/User')
|
||||
const Settings = require('../../../models/Settings')
|
||||
const cuid = require('cuid')
|
||||
const mongoose = require('mongoose')
|
||||
const jwt = require('jsonwebtoken')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const githubCodeSchema = {
|
||||
schema: {
|
||||
querystring: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
code: { type: 'string' }
|
||||
},
|
||||
required: ['code']
|
||||
}
|
||||
}
|
||||
}
|
||||
fastify.get('/app', { schema: githubCodeSchema }, async (request, reply) => {
|
||||
const { code } = request.query
|
||||
try {
|
||||
const { data } = await axios({
|
||||
method: 'post',
|
||||
url: `https://github.com/login/oauth/access_token?client_id=${fastify.config.VITE_GITHUB_APP_CLIENTID}&client_secret=${fastify.config.GITHUB_APP_CLIENT_SECRET}&code=${code}`,
|
||||
headers: {
|
||||
accept: 'application/json'
|
||||
}
|
||||
})
|
||||
|
||||
const token = data.access_token
|
||||
const githubAxios = axios.create({
|
||||
baseURL: 'https://api.github.com'
|
||||
})
|
||||
|
||||
githubAxios.defaults.headers.common.Accept = 'Application/json'
|
||||
githubAxios.defaults.headers.common.Authorization = `token ${token}`
|
||||
|
||||
try {
|
||||
let uid = cuid()
|
||||
const { avatar_url } = (await githubAxios.get('/user')).data // eslint-disable-line
|
||||
const email = (await githubAxios.get('/user/emails')).data.filter(
|
||||
(e) => e.primary
|
||||
)[0].email
|
||||
const settings = await Settings.findOne({ applicationName: 'coolify' })
|
||||
const registeredUsers = await User.find().countDocuments()
|
||||
const foundUser = await User.findOne({ email })
|
||||
if (foundUser) {
|
||||
await User.findOneAndUpdate(
|
||||
{ email },
|
||||
{ avatar: avatar_url },
|
||||
{ upsert: true, new: true }
|
||||
)
|
||||
uid = foundUser.uid
|
||||
} else {
|
||||
if (registeredUsers === 0) {
|
||||
const newUser = new User({
|
||||
_id: new mongoose.Types.ObjectId(),
|
||||
email,
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
const defaultSettings = new Settings({
|
||||
_id: new mongoose.Types.ObjectId()
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
await defaultSettings.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
} else {
|
||||
if (!settings && registeredUsers > 0) {
|
||||
reply.code(500).send('Registration disabled, enable it in settings.')
|
||||
} else {
|
||||
if (!settings.allowRegistration) {
|
||||
reply.code(500).send('You are not allowed here!')
|
||||
} else {
|
||||
const newUser = new User({
|
||||
_id: new mongoose.Types.ObjectId(),
|
||||
email,
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const jwtToken = jwt.sign({}, fastify.config.JWT_SIGN_KEY, {
|
||||
expiresIn: 15778800,
|
||||
algorithm: 'HS256',
|
||||
audience: 'coolLabs',
|
||||
issuer: 'coolLabs',
|
||||
jwtid: uid,
|
||||
subject: `User:${uid}`,
|
||||
notBefore: -1000
|
||||
})
|
||||
reply
|
||||
.code(200)
|
||||
.redirect(
|
||||
302,
|
||||
`/api/v1/login/github/success?jwtToken=${jwtToken}&ghToken=${token}`
|
||||
)
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
fastify.get('/success', async (request, reply) => {
|
||||
return reply.sendFile('bye.html')
|
||||
})
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
const Server = require('../../../models/Logs/Server')
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
const serverLogs = await Server.find().select('-_id -__v')
|
||||
// TODO: Should do better
|
||||
return {
|
||||
serverLogs
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
const Settings = require('../../../models/Settings')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const applicationName = 'coolify'
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
allowRegistration: { type: 'boolean' },
|
||||
sendErrors: { type: 'boolean' }
|
||||
},
|
||||
required: []
|
||||
}
|
||||
}
|
||||
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
let settings = await Settings.findOne({ applicationName }).select('-_id -__v')
|
||||
// TODO: Should do better
|
||||
if (!settings) {
|
||||
settings = {
|
||||
applicationName,
|
||||
allowRegistration: false
|
||||
}
|
||||
}
|
||||
return {
|
||||
settings
|
||||
}
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
try {
|
||||
const settings = await Settings.findOneAndUpdate(
|
||||
{ applicationName },
|
||||
{ applicationName, ...request.body },
|
||||
{ upsert: true, new: true }
|
||||
).select('-_id -__v')
|
||||
reply.code(201).send({ settings })
|
||||
} catch (error) {
|
||||
await saveServerLog(error)
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
reply.code(200).send('NO')
|
||||
})
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
const upgradeP1 = await execShellAsync('bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p1.sh)"')
|
||||
await saveServerLog({ message: upgradeP1, type: 'UPGRADE-P-1' })
|
||||
reply.code(200).send('I\'m trying, okay?')
|
||||
const upgradeP2 = await execShellAsync('docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -u root coolify bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p2.sh)"')
|
||||
await saveServerLog({ message: upgradeP2, type: 'UPGRADE-P-2' })
|
||||
})
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
const User = require('../../models/User')
|
||||
const jwt = require('jsonwebtoken')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
const { authorization } = request.headers
|
||||
if (!authorization) {
|
||||
reply.code(401).send({})
|
||||
return
|
||||
}
|
||||
const token = authorization.split(' ')[1]
|
||||
const verify = jwt.verify(token, fastify.config.JWT_SIGN_KEY)
|
||||
const found = await User.findOne({ uid: verify.jti })
|
||||
found ? reply.code(200).send({}) : reply.code(401).send({})
|
||||
} catch (error) {
|
||||
reply.code(401).send({})
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,119 +0,0 @@
|
||||
const crypto = require('crypto')
|
||||
const { cleanupTmp } = require('../../../libs/common')
|
||||
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const ApplicationLog = require('../../../models/Logs/Application')
|
||||
const ServerLog = require('../../../models/Logs/Server')
|
||||
|
||||
const { queueAndBuild } = require('../../../libs/applications')
|
||||
const { setDefaultConfiguration, precheckDeployment } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const cloneRepository = require('../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
// TODO: Add this to fastify plugin
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
ref: { type: 'string' },
|
||||
repository: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'number' },
|
||||
full_name: { type: 'string' }
|
||||
},
|
||||
required: ['id', 'full_name']
|
||||
},
|
||||
installation: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'number' }
|
||||
},
|
||||
required: ['id']
|
||||
}
|
||||
},
|
||||
required: ['ref', 'repository', 'installation']
|
||||
}
|
||||
}
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
let configuration
|
||||
const hmac = crypto.createHmac('sha256', fastify.config.GITHUP_APP_WEBHOOK_SECRET)
|
||||
const digest = Buffer.from('sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'), 'utf8')
|
||||
const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8')
|
||||
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
|
||||
if (request.headers['x-github-event'] !== 'push') {
|
||||
reply.code(500).send({ error: 'Not a push event.' })
|
||||
return
|
||||
}
|
||||
try {
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
configuration = services.find(r => {
|
||||
if (request.body.ref.startsWith('refs')) {
|
||||
const branch = request.body.ref.split('/')[2]
|
||||
if (
|
||||
JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
|
||||
JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
|
||||
) {
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
})
|
||||
if (!configuration) {
|
||||
reply.code(500).send({ error: 'No configuration found.' })
|
||||
return
|
||||
}
|
||||
|
||||
configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration))
|
||||
await cloneRepository(configuration)
|
||||
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({ services, configuration })
|
||||
|
||||
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
|
||||
return
|
||||
}
|
||||
const alreadyQueued = await Deployment.find({
|
||||
repoId: configuration.repository.id,
|
||||
branch: configuration.repository.branch,
|
||||
organization: configuration.repository.organization,
|
||||
name: configuration.repository.name,
|
||||
domain: configuration.publish.domain,
|
||||
progress: { $in: ['queued', 'inprogress'] }
|
||||
})
|
||||
|
||||
if (alreadyQueued.length > 0) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
queueAndBuild(configuration, imageChanged)
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
|
||||
} catch (error) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId } = configuration.general
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
if (error.name === 'Error') {
|
||||
// Error during runtime
|
||||
await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
} else {
|
||||
// Error in my code
|
||||
const payload = { message: error.message, stack: error.stack, type: 'spaghetticode' }
|
||||
if (error.message && error.stack) await new ServerLog(payload).save()
|
||||
if (reply.sent) await new ApplicationLog({ repoId: id, branch, deployId, event: `[ERROR 😖]: ${error.stack}` }).save()
|
||||
}
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
const schema = {
|
||||
type: 'object',
|
||||
required: [
|
||||
'DOMAIN',
|
||||
'EMAIL',
|
||||
'VITE_GITHUB_APP_CLIENTID',
|
||||
'GITHUB_APP_CLIENT_SECRET',
|
||||
'GITHUB_APP_PRIVATE_KEY',
|
||||
'GITHUP_APP_WEBHOOK_SECRET',
|
||||
'JWT_SIGN_KEY',
|
||||
'SECRETS_ENCRYPTION_KEY'
|
||||
],
|
||||
properties: {
|
||||
DOMAIN: {
|
||||
type: 'string'
|
||||
},
|
||||
EMAIL: {
|
||||
type: 'string'
|
||||
},
|
||||
VITE_GITHUB_APP_CLIENTID: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUB_APP_CLIENT_SECRET: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUB_APP_PRIVATE_KEY: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUP_APP_WEBHOOK_SECRET: {
|
||||
type: 'string'
|
||||
},
|
||||
JWT_SIGN_KEY: {
|
||||
type: 'string'
|
||||
},
|
||||
DOCKER_ENGINE: {
|
||||
type: 'string',
|
||||
default: '/var/run/docker.sock'
|
||||
},
|
||||
DOCKER_NETWORK: {
|
||||
type: 'string',
|
||||
default: 'coollabs'
|
||||
},
|
||||
SECRETS_ENCRYPTION_KEY: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { schema }
|
||||
117
api/server.js
117
api/server.js
@@ -1,117 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const fs = require('fs')
|
||||
const util = require('util')
|
||||
const axios = require('axios')
|
||||
const mongoose = require('mongoose')
|
||||
const path = require('path')
|
||||
const { saveServerLog } = require('./libs/logging')
|
||||
const { execShellAsync } = require('./libs/common')
|
||||
const { purgeImagesContainers, cleanupStuckedDeploymentsInDB } = require('./libs/applications/cleanup')
|
||||
const fastify = require('fastify')({
|
||||
trustProxy: true,
|
||||
logger: {
|
||||
level: 'error'
|
||||
}
|
||||
})
|
||||
fastify.register(require('../api/libs/http-error'))
|
||||
|
||||
const { schema } = require('./schema')
|
||||
|
||||
process.on('unhandledRejection', async (reason, p) => {
|
||||
await saveServerLog({ message: reason.message, type: 'unhandledRejection' })
|
||||
})
|
||||
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
})
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
fastify.register(require('fastify-static'), {
|
||||
root: path.join(__dirname, '../dist/')
|
||||
})
|
||||
|
||||
fastify.setNotFoundHandler(function (request, reply) {
|
||||
reply.sendFile('index.html')
|
||||
})
|
||||
} else {
|
||||
fastify.register(require('fastify-static'), {
|
||||
root: path.join(__dirname, '../public/')
|
||||
})
|
||||
}
|
||||
|
||||
fastify.register(require('./app'), { prefix: '/api/v1' })
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
mongoose.connect(
|
||||
`mongodb://${process.env.MONGODB_USER}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DB}?authSource=${process.env.MONGODB_DB}&readPreference=primary&ssl=false`,
|
||||
{ useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
|
||||
)
|
||||
} else {
|
||||
mongoose.connect(
|
||||
'mongodb://localhost:27017/coolify?&readPreference=primary&ssl=false',
|
||||
{ useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
|
||||
)
|
||||
}
|
||||
|
||||
mongoose.connection.on(
|
||||
'error',
|
||||
console.error.bind(console, 'connection error:')
|
||||
)
|
||||
mongoose.connection.once('open', async function () {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
fastify.listen(3000, '0.0.0.0')
|
||||
console.log('Coolify API is up and running in production.')
|
||||
} else {
|
||||
const logFile = fs.createWriteStream('api/development/console.log', { flags: 'w' })
|
||||
const logStdout = process.stdout
|
||||
|
||||
console.log = function (d) {
|
||||
logFile.write(`[INFO]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
console.error = function (d) {
|
||||
logFile.write(`[ERROR]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
console.warn = function (d) {
|
||||
logFile.write(`[WARN]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
fastify.listen(3001)
|
||||
console.log('Coolify API is up and running in development.')
|
||||
}
|
||||
try {
|
||||
const { main } = (await axios.get('https://get.coollabs.io/version.json')).data.coolify
|
||||
if (main.clearServerLogs) {
|
||||
await mongoose.connection.db.dropCollection('logs-servers')
|
||||
}
|
||||
} catch (error) {
|
||||
// Could not cleanup logs-servers collection
|
||||
}
|
||||
// On start cleanup inprogress/queued deployments.
|
||||
try {
|
||||
await cleanupStuckedDeploymentsInDB()
|
||||
} catch (error) {
|
||||
// Could not cleanup DB 🤔
|
||||
}
|
||||
try {
|
||||
// Doing because I do not want to prune these images. Prune skips coolify-reserve labeled images.
|
||||
const basicImages = ['nginx:stable-alpine', 'node:lts', 'ubuntu:20.04', 'php:apache', 'rust:latest']
|
||||
for (const image of basicImages) {
|
||||
await execShellAsync(`echo "FROM ${image}" | docker build --label coolify-reserve=true -t ${image} -`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Could not pull some basic images from Docker Hub.')
|
||||
console.log(error)
|
||||
}
|
||||
try {
|
||||
await purgeImagesContainers()
|
||||
} catch (error) {
|
||||
console.log('Could not purge containers/images.')
|
||||
console.log(error)
|
||||
}
|
||||
})
|
||||
11
data/docker/daemon.json
Normal file
11
data/docker/daemon.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"log-driver": "json-file",
|
||||
"log-opts": {
|
||||
"max-size": "100m",
|
||||
"max-file": "5"
|
||||
},
|
||||
"features": {
|
||||
"buildkit": true
|
||||
},
|
||||
"live-restore": true
|
||||
}
|
||||
6
data/fluentd/Dockerfile-dev
Normal file
6
data/fluentd/Dockerfile-dev
Normal file
@@ -0,0 +1,6 @@
|
||||
FROM fluent/fluent-bit:1.9.0
|
||||
COPY fluentbit-dev.conf /tmp/fluentbit.conf
|
||||
ENTRYPOINT ["/fluent-bit/bin/fluent-bit", "-c", "/tmp/fluentbit.conf"]
|
||||
# USER root
|
||||
# RUN ["gem", "install", "fluent-plugin-mongo"]
|
||||
# USER fluent
|
||||
24
data/fluentd/fluentbit-dev.conf
Normal file
24
data/fluentd/fluentbit-dev.conf
Normal file
@@ -0,0 +1,24 @@
|
||||
[INPUT]
|
||||
Name forward
|
||||
Listen 0.0.0.0
|
||||
Port 24224
|
||||
Buffer_Chunk_Size 32KB
|
||||
Buffer_Max_Size 64KB
|
||||
|
||||
[OUTPUT]
|
||||
Name influxdb
|
||||
Match *
|
||||
Host coolify-influxdb
|
||||
Port 8086
|
||||
Bucket containerlogs
|
||||
Org organization
|
||||
HTTP_Token supertoken
|
||||
Sequence_Tag _seq
|
||||
Tag_Keys container_name
|
||||
[OUTPUT]
|
||||
Name http
|
||||
Match *
|
||||
Host host.docker.internal
|
||||
Port 3000
|
||||
URI /logs.json
|
||||
Format json
|
||||
28
data/fluentd/fluentd-dev.conf
Normal file
28
data/fluentd/fluentd-dev.conf
Normal file
@@ -0,0 +1,28 @@
|
||||
<source>
|
||||
@type forward
|
||||
port 24224
|
||||
bind 0.0.0.0
|
||||
</source>
|
||||
|
||||
<match **>
|
||||
@type http
|
||||
endpoint http://host.docker.internal:3000/logs.json
|
||||
<buffer>
|
||||
flush_at_shutdown true
|
||||
flush_mode immediate
|
||||
flush_thread_count 8
|
||||
flush_thread_interval 1
|
||||
flush_thread_burst_interval 1
|
||||
retry_forever true
|
||||
retry_type exponential_backoff
|
||||
</buffer>
|
||||
</match>
|
||||
|
||||
<filter docker.**>
|
||||
@type parser
|
||||
key_name log
|
||||
reserve_data true
|
||||
<parse>
|
||||
@type json
|
||||
</parse>
|
||||
</filter>
|
||||
6
data/haproxy-http.Dockerfile
Normal file
6
data/haproxy-http.Dockerfile
Normal file
@@ -0,0 +1,6 @@
|
||||
FROM haproxytech/haproxy-alpine:2.5
|
||||
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe
|
||||
|
||||
COPY data/haproxy/haproxy.cfg-http.template /usr/local/etc/haproxy/haproxy.cfg
|
||||
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
|
||||
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
|
||||
6
data/haproxy-tcp.Dockerfile
Normal file
6
data/haproxy-tcp.Dockerfile
Normal file
@@ -0,0 +1,6 @@
|
||||
FROM haproxytech/haproxy-alpine:2.5
|
||||
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe
|
||||
|
||||
COPY data/haproxy/haproxy.cfg-tcp.template /usr/local/etc/haproxy/haproxy.cfg
|
||||
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
|
||||
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
|
||||
6
data/haproxy.Dockerfile
Normal file
6
data/haproxy.Dockerfile
Normal file
@@ -0,0 +1,6 @@
|
||||
FROM haproxytech/haproxy-alpine:2.5
|
||||
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe
|
||||
|
||||
COPY data/haproxy/haproxy.cfg.template /usr/local/etc/haproxy/haproxy.cfg
|
||||
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
|
||||
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
|
||||
29
data/haproxy/dataplaneapi.hcl
Normal file
29
data/haproxy/dataplaneapi.hcl
Normal file
@@ -0,0 +1,29 @@
|
||||
config_version = 2
|
||||
name = "easy_gar"
|
||||
mode = "single"
|
||||
status = "null"
|
||||
|
||||
dataplaneapi {
|
||||
host = "0.0.0.0"
|
||||
port = 5555
|
||||
|
||||
transaction {
|
||||
transaction_dir = "/tmp/haproxy"
|
||||
}
|
||||
|
||||
advertised {
|
||||
api_address = ""
|
||||
api_port = 0
|
||||
}
|
||||
}
|
||||
|
||||
haproxy {
|
||||
config_file = "/usr/local/etc/haproxy/haproxy.cfg"
|
||||
haproxy_bin = "/usr/local/sbin/haproxy"
|
||||
|
||||
reload {
|
||||
reload_delay = 2
|
||||
reload_cmd = "kill -HUP 1"
|
||||
restart_cmd = "kill -SIGUSR2 1"
|
||||
}
|
||||
}
|
||||
19
data/haproxy/haproxy.cfg-http.template
Normal file
19
data/haproxy/haproxy.cfg-http.template
Normal file
@@ -0,0 +1,19 @@
|
||||
global
|
||||
log stdout format raw local0 debug
|
||||
|
||||
defaults
|
||||
mode http
|
||||
log global
|
||||
timeout http-request 120s
|
||||
timeout connect 20s
|
||||
timeout client 120s
|
||||
timeout server 120s
|
||||
|
||||
frontend "${APP}"
|
||||
mode http
|
||||
bind *:"${PORT}" name "${APP}"
|
||||
default_backend "${APP}"
|
||||
|
||||
backend "${APP}"
|
||||
mode http
|
||||
server "${APP}" "${APP}":"${PRIVATE_PORT}" check
|
||||
15
data/haproxy/haproxy.cfg-tcp.template
Normal file
15
data/haproxy/haproxy.cfg-tcp.template
Normal file
@@ -0,0 +1,15 @@
|
||||
global
|
||||
log stdout format raw local0 debug
|
||||
|
||||
defaults
|
||||
mode tcp
|
||||
log global
|
||||
|
||||
frontend "${APP}"
|
||||
mode tcp
|
||||
bind *:"${PORT}" name "${APP}"
|
||||
default_backend "${APP}"
|
||||
|
||||
backend "${APP}"
|
||||
mode tcp
|
||||
server "${APP}" "${APP}":"${PRIVATE_PORT}" check
|
||||
38
data/haproxy/haproxy.cfg.template
Normal file
38
data/haproxy/haproxy.cfg.template
Normal file
@@ -0,0 +1,38 @@
|
||||
global
|
||||
stats socket /var/run/api.sock user haproxy group haproxy mode 660 level admin expose-fd listeners
|
||||
log stdout format raw local0 debug
|
||||
|
||||
defaults
|
||||
mode http
|
||||
log global
|
||||
timeout http-request 120s
|
||||
timeout connect 20s
|
||||
timeout client 120s
|
||||
timeout server 120s
|
||||
|
||||
userlist haproxy-dataplaneapi
|
||||
user admin insecure-password "${HAPROXY_PASSWORD}"
|
||||
|
||||
frontend http
|
||||
mode http
|
||||
bind :80
|
||||
bind :443 ssl crt /usr/local/etc/haproxy/ssl/ alpn h2,http/1.1
|
||||
acl is_certbot path_beg /.well-known/acme-challenge/
|
||||
use_backend backend-certbot if is_certbot
|
||||
use_backend %[req.hdr(host),lower]
|
||||
|
||||
frontend stats
|
||||
bind *:8404
|
||||
stats enable
|
||||
stats uri /
|
||||
stats refresh 5s
|
||||
stats admin if TRUE
|
||||
stats auth "${HAPROXY_USERNAME}:${HAPROXY_PASSWORD}"
|
||||
|
||||
backend backend-certbot
|
||||
mode http
|
||||
server certbot host.docker.internal:9080
|
||||
|
||||
program api
|
||||
command /usr/bin/dataplaneapi -f /usr/local/etc/haproxy/dataplaneapi.hcl --userlist haproxy-dataplaneapi
|
||||
no option start-on-reload
|
||||
81
data/haproxy/ssl/default.pem
Normal file
81
data/haproxy/ssl/default.pem
Normal file
@@ -0,0 +1,81 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFETCCAvkCFE/5JtU5geT5hOjFuQPiLgCYHwsOMA0GCSqGSIb3DQEBCwUAMEUx
|
||||
CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl
|
||||
cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjExMDExMDkwNzQ1WhcNMzExMDA5MDkw
|
||||
NzQ1WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE
|
||||
CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOC
|
||||
Ag8AMIICCgKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
|
||||
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
|
||||
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
|
||||
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
|
||||
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
|
||||
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
|
||||
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
|
||||
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
|
||||
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
|
||||
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
|
||||
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
|
||||
ATANBgkqhkiG9w0BAQsFAAOCAgEAGQED96wBGzbMUlk9mIvZeLerzEAB3YfgfAYa
|
||||
EAi79QHxM8UX06xmA2xtGvJSvlU8Xods9vxpBmIUnbDRTIAHNDApT19+vPg/iSfQ
|
||||
1J9Fo4b5kjmWL6SalEdYcxqH9V/QndHta4MXP91u/ZsJ/exwDTZFatXsfGkPjUmN
|
||||
Xp+Ip6iQg7+kV3JpRnMSbevj2Oujs7qTAdQedH38ZTNS0AaM5gvZyQkccCTKNBQ4
|
||||
3O8MhCau7U0EUirndqsQXa0D3o78FpKztLNXSM7919jU2y36kMrWXfArfrBKHJ9b
|
||||
nZeO7nkbHgvmVS8NTg9pR7L7u+YXTa2p1H2ZnpMQvruV7iL/Pb1H2N68UdvnQScL
|
||||
sgacGSzM6b6PVdWRbECiuzC0UyWLZo/LoU3DQFGoiDQ4e/B3+TMrvgFI0CnpAQ4w
|
||||
qiaVFJlRQeF4GaS4qHsN28OBliFATB3TXONFnz1aVkQlEHuh2+JbuL1b1lxvlX5t
|
||||
gBbu/GgAcP4Uy2z4PoDmempAvNi2kCcLB98m+jbFSMSB3nkrdj6MzyN7kW9bhk3T
|
||||
ClimxDmc23seprwLcxJUPP5q+HRB1VLKXLwIYxu+Up3g29d4k1Iy9nUUP9lITLTk
|
||||
blJxZ2BPuQqTLzyqmAEWa1HxljFC1b7oMp9a98PbxC3MxUggM7zx/rgXWxM8osib
|
||||
uwSZmw0=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
|
||||
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
|
||||
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
|
||||
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
|
||||
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
|
||||
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
|
||||
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
|
||||
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
|
||||
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
|
||||
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
|
||||
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
|
||||
AQKCAgEAm1/z33Jwk4crTQAjJ0uBqxm1pW/ndSq4MO8cEzEGjL8F7iWK+/P8LiGV
|
||||
+sPWuuRzX7/N3OVDiFOgnqeniNWV7vK7XE9T0GMN4ALiyVW/D4mIxKOeA7jXycOq
|
||||
aap0DPdCFFbZVLkL10Vhp77LyHFjEsJn/4oTBRk0y1LG/as9bOMD6j29/X7hEL20
|
||||
LOU4LQzEW26YU7lqD+nKlijFjHYSTolRrOBPe/fE1BxxXLFOKfMKbcaygc8xCzTu
|
||||
fhQ8Nep45BtSuQ9Yq/WfSLFecemWR8yvH0k37yxjBknHVD23maZ+/PEEPKWM/2+g
|
||||
IzGsmZrBILVmOb2/v9CWxqY0JEfQ6aU/nLW1ZiXSOIPmKEooK/hPVxFIyQ1yET4G
|
||||
kQZ5RroY/QDrI14ms8P0iDzZ8K3EFKUyjiBbc83Mb0YIZ4hKd76gioOUIPeEQB+y
|
||||
QLZ8Cb9YS3V8uIOJg+F4xlpJSePAZphfSxRLojSiKUeCs8gNUxGz0zwiMNf1p76F
|
||||
8CaLgvSwT/cgQjWitMeeE1Ha+8lY8VzESmd10gPk2uES/qdrmMhwFwovPqqrtMqj
|
||||
kMrFKNy7Crka6me3dhKEtryRTk5ho2IS/VCy/eXQ7lUW8Cl4uFxmjpHYSJMqDWvC
|
||||
vu1p3/B1psSZIy2V2M9QqwZCysHqvGJMOCvYmnc6T62+kDRKQ7ECggEBAOS/1ptA
|
||||
75OBAsHLovkspiCvn3gb/VTvH6LOvxYTohjr5iBeX137vg0aR1rg0jwcdf8EEJYw
|
||||
4YxOid7KmV7O25ujzduQgwpVgujnJAeBLeLDC5dVbq3PQah61AvR2O/7t+Ls3oxi
|
||||
cWh/OHC6SeZ/n406cxSCCUpVwtgHTaNFzaSmpDdEOSbjvXjQQjiRsG7j/1u64riq
|
||||
RlJ/hIUlcys0g94yeN/5lPaNfsq0+vTSAYuTVVXVbEntwWcZVZxnQJviZVgJ99zM
|
||||
RzE3sprvvr+I5QQ4FRMn0W9U7gblSJd5FGEL8gye4SRd+LxoUL4DR6pfuwd0vlXA
|
||||
g+dgiOKoHm2Bb8cCggEBAMDMHMNR6uipdMivPjBTlklnaYd9SY3c5x65yNtx4CNh
|
||||
rXyvy/6YvME7PPnKQZ8TQ4DkbVDUCAF7wnyAJJ7eWMav3bNlqWWjzaBvQz4Fn0XG
|
||||
/1W5R1CoJ9DW5FY3f9efJzQTmfn6dIlCx1gW7XfVBZQqI1LORMWUYenk0KAvjlg2
|
||||
UHYYl/BT1RhtYyzOJHto1PaUvCNDiOiDWAkTpLigYm7hGgVmcSwbo4F54SNUHdV7
|
||||
yz3CorCM4VsEYYSL80WHYxf/Zc+mcIDoWOdog0iEeK/Zu++yG5lPRxC1862GmsZA
|
||||
J06BMqX+NVGOfiGcVaGH+SZJXeFcrr7F8ZWp6y38QSMCggEAJwzo4hAv1gqMIfFV
|
||||
nRwWMDZLDwIYOUupJu4MiQRJA+AhpRz3QuAbDbmSvNzshv6E1kgnXLxzhLRTrQkB
|
||||
LcI6k1NfbUA6XqVCd+gdqnpPDwslC2y2PE3Jc62kTXBBjJZ4SfEN/QFBQwmU5Qmo
|
||||
XAUlg8KaqsGYPGxvmtmEU38zIAyitByddRoj2mATLf0RFZ0ulsZMtiG7Z5IFWYWP
|
||||
J60LZf9Py0ycNYrqPkivHuRLBzzbsI+CsQw5nBQjHVQzH2mCy4jIG5V0Ad70Sqbq
|
||||
9V+1WQcJ8f82Lb9d8ydpQRKWfArCA42L+d1g/SkBv65nqZo2H4u6goEfA3zjYW45
|
||||
44/ZOQKCAQB440MhwYqe6ioc76z51l+ElUAZQZjOR/XvUSS9XHDjHosOhJhPgmvQ
|
||||
aZl5MrXkzcpk1lYo+Vovu+8d66eKqfZWVs2XgCYwYf48G6e5CwNsWDOgB7XMwDN/
|
||||
Ak9YNCKIC/Yj9Cp3EPDjZCjkdjPeEIcX+Tf+4vFCRiEC7INX/ZmufBgFhLQ4cAhM
|
||||
8cHexT8g1oG6P1acces1h626u0NstLwjtCeBvVM3CfmC5O4jHco7Iw00I4epVhyz
|
||||
2lJfLvWR4itjT7QB+OXQHmAocWLoJJAcC1WJHU+q2IfB1aT+aElCB9XdpqsgY/4A
|
||||
rm0uG/2hdEXoGNaxyVCUtD8fzdR2GBarAoIBACCYXXREMYb6i1TbR5Q2LvVQUPsO
|
||||
Hgnbr+PLmx93rfUzDcr5r+cJgryjYQDKJTRleDJhg80M3RYOq+IOdl6yxOmRATmJ
|
||||
ZDgwRVD1F6VFxBJePcAW30FI5CoBogsHaZQDKGsopEaDRLK5E3QHUVG5qj323RdI
|
||||
Unf1++wI4nw+qwsVf1gSTcAdzq29v3NIWUyvvrmTNO4MxFTt0/lqkCsdT/2EFQDB
|
||||
/yQ1HCtQQjXE1xlYh0BnMZp9+4FmrlMC9Oj5H0dDSWmInPION0ft8/SjBj4TQ5Qi
|
||||
2DUo1WOWQnVR8Bxz0B8McXS+dOmgLe8ws4/ez7DoEVqHTgirKqBg5qRFQKw=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
1
data/prisma/build-prisma-engine.sh
Normal file
1
data/prisma/build-prisma-engine.sh
Normal file
@@ -0,0 +1 @@
|
||||
docker build --platform linux/amd64,linux/arm64 -t coollabsio/prisma-engine -f prisma-engine.Dockerfile --push .
|
||||
10
data/prisma/prisma-engine.Dockerfile
Normal file
10
data/prisma/prisma-engine.Dockerfile
Normal file
@@ -0,0 +1,10 @@
|
||||
FROM rust:1.58.1-alpine3.14 as prisma
|
||||
WORKDIR /prisma
|
||||
ENV RUSTFLAGS="-C target-feature=-crt-static"
|
||||
RUN apk --no-cache add openssl direnv git musl-dev openssl-dev build-base perl protoc
|
||||
RUN git clone --depth=1 --branch=3.12.x https://github.com/prisma/prisma-engines.git /prisma
|
||||
RUN cargo build --release
|
||||
|
||||
FROM alpine
|
||||
WORKDIR /prisma-engines
|
||||
COPY --from=prisma /prisma/target/release/query-engine /prisma/target/release/migration-engine /prisma/target/release/introspection-engine /prisma/target/release/prisma-fmt /prisma-engines/
|
||||
0
db/.gitkeep
Normal file
0
db/.gitkeep
Normal file
35
docker-compose-dev.yaml
Normal file
35
docker-compose-dev.yaml
Normal file
@@ -0,0 +1,35 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
redis:
|
||||
image: redis:6.2-alpine
|
||||
container_name: coolify-redis
|
||||
networks:
|
||||
- coolify-infra
|
||||
ports:
|
||||
- target: 6379
|
||||
published: 6379
|
||||
protocol: tcp
|
||||
mode: host
|
||||
# fluentbit:
|
||||
# container_name: coolify-fluentbit
|
||||
# build:
|
||||
# context: ./data/fluentd
|
||||
# dockerfile: Dockerfile-dev
|
||||
# ports:
|
||||
# - target: 24224
|
||||
# published: 24224
|
||||
# protocol: tcp
|
||||
# mode: host
|
||||
# - target: 24224
|
||||
# published: 24224
|
||||
# protocol: udp
|
||||
# mode: host
|
||||
# networks:
|
||||
# - coolify-infra
|
||||
# extra_hosts:
|
||||
# - 'host.docker.internal:host-gateway'
|
||||
networks:
|
||||
coolify-infra:
|
||||
attachable: true
|
||||
name: coolify-infra
|
||||
23
docker-compose-haproxy.yaml
Normal file
23
docker-compose-haproxy.yaml
Normal file
@@ -0,0 +1,23 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
haproxy:
|
||||
image: coollabsio/coolify-haproxy-alpine:latest
|
||||
container_name: coolify-haproxy
|
||||
extra_hosts:
|
||||
- 'host.docker.internal:host-gateway'
|
||||
networks:
|
||||
- coolify
|
||||
volumes:
|
||||
- './data/haproxy/:/usr/local/etc/haproxy/'
|
||||
ports:
|
||||
- '80:80'
|
||||
- '443:443'
|
||||
- '8404:8404'
|
||||
- '5555:5555'
|
||||
- '3306:3306'
|
||||
|
||||
networks:
|
||||
coolify:
|
||||
attachable: true
|
||||
name: coolify
|
||||
41
docker-compose.yaml
Normal file
41
docker-compose.yaml
Normal file
@@ -0,0 +1,41 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
coolify:
|
||||
image: coollabsio/coolify:${TAG:-latest}
|
||||
restart: always
|
||||
container_name: coolify
|
||||
ports:
|
||||
- target: 3000
|
||||
published: 3000
|
||||
protocol: tcp
|
||||
mode: host
|
||||
volumes:
|
||||
- 'coolify-db:/app/db'
|
||||
- 'coolify-ssl-certs:/app/ssl'
|
||||
- 'coolify-letsencrypt:/etc/letsencrypt'
|
||||
- '/var/run/docker.sock:/var/run/docker.sock'
|
||||
env_file:
|
||||
- '.env'
|
||||
networks:
|
||||
- coolify-infra
|
||||
depends_on: ['redis']
|
||||
redis:
|
||||
image: redis:6.2-alpine
|
||||
restart: always
|
||||
container_name: coolify-redis
|
||||
networks:
|
||||
- coolify-infra
|
||||
|
||||
networks:
|
||||
coolify-infra:
|
||||
attachable: true
|
||||
name: coolify-infra
|
||||
|
||||
volumes:
|
||||
coolify-db:
|
||||
name: coolify-db
|
||||
coolify-ssl-certs:
|
||||
name: coolify-ssl-certs
|
||||
coolify-letsencrypt:
|
||||
name: coolify-letsencrypt
|
||||
20
index.html
20
index.html
@@ -1,20 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" href="/favicon.png" />
|
||||
<link rel="preload" as="image" href="/favicon.png">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>coolify: Heroku & Netlify alternative</title>
|
||||
<link rel="dns-prefetch" href="https://cdn.coollabs.io/" />
|
||||
<link rel="preconnect" href="https://cdn.coollabs.io/" crossorigin="" />
|
||||
<link rel="stylesheet" href="https://cdn.coollabs.io/fonts/montserrat/montserrat.css" />
|
||||
<link rel="stylesheet" href="https://cdn.coollabs.io/css/microtip-0.2.2.min.css" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<script type="module" src="/src/index.js"></script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
88
install.sh
88
install.sh
@@ -1,88 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
preTasks() {
|
||||
echo '
|
||||
##############################
|
||||
#### Pulling Git Updates #####
|
||||
##############################'
|
||||
GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git pull
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo '
|
||||
####################################
|
||||
#### Ooops something not okay! #####
|
||||
####################################'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo '
|
||||
##############################
|
||||
#### Building Base Image #####
|
||||
##############################'
|
||||
docker build --label coolify-reserve=true -t coolify-base -f install/Dockerfile-base .
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo '
|
||||
####################################
|
||||
#### Ooops something not okay! #####
|
||||
####################################'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo '
|
||||
##################################
|
||||
#### Checking configuration. #####
|
||||
##################################'
|
||||
docker run --rm -w /usr/src/app coolify-base node install/install.js --check
|
||||
if [ $? -ne 0 ]; then
|
||||
echo '
|
||||
##################################
|
||||
#### Missing configuration ! #####
|
||||
##################################'
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
case "$1" in
|
||||
"all")
|
||||
preTasks
|
||||
echo '
|
||||
#################################
|
||||
#### Rebuilding everything. #####
|
||||
#################################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type all
|
||||
;;
|
||||
"coolify")
|
||||
preTasks
|
||||
echo '
|
||||
##############################
|
||||
#### Rebuilding Coolify. #####
|
||||
##############################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type coolify
|
||||
;;
|
||||
"proxy")
|
||||
preTasks
|
||||
echo '
|
||||
############################
|
||||
#### Rebuilding Proxy. #####
|
||||
############################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type proxy
|
||||
;;
|
||||
"upgrade-phase-1")
|
||||
preTasks
|
||||
echo '
|
||||
################################
|
||||
#### Upgrading Coolify P1. #####
|
||||
################################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type upgrade
|
||||
;;
|
||||
"upgrade-phase-2")
|
||||
echo '
|
||||
################################
|
||||
#### Upgrading Coolify P2. #####
|
||||
################################'
|
||||
docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/update.js --type upgrade
|
||||
;;
|
||||
*)
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
@@ -1,5 +0,0 @@
|
||||
FROM coolify-base
|
||||
WORKDIR /usr/src/app
|
||||
RUN pnpm build
|
||||
CMD ["pnpm", "start"]
|
||||
EXPOSE 3000
|
||||
@@ -1,19 +0,0 @@
|
||||
FROM ubuntu:20.04 as binaries
|
||||
RUN apt update && apt install -y curl gnupg2 ca-certificates
|
||||
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
|
||||
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
|
||||
RUN chmod +x /usr/bin/envsubst
|
||||
RUN apt update && apt install -y docker-ce-cli && apt clean all
|
||||
|
||||
FROM node:14 as modules
|
||||
COPY --from=binaries /usr/bin/docker /usr/bin/docker
|
||||
COPY --from=binaries /usr/bin/envsubst /usr/bin/envsubst
|
||||
RUN curl -L https://pnpm.js.org/pnpm.js | node - add --global pnpm
|
||||
WORKDIR /usr/src/app
|
||||
COPY ./package*.json .
|
||||
RUN pnpm install
|
||||
|
||||
FROM modules
|
||||
WORKDIR /usr/src/app
|
||||
COPY . .
|
||||
@@ -1,15 +0,0 @@
|
||||
FROM node:lts
|
||||
LABEL coolify-preserve=true
|
||||
WORKDIR /usr/src/app
|
||||
RUN curl -fsSL https://download.docker.com/linux/static/stable/x86_64/docker-20.10.6.tgz | tar -xzvf - docker/docker -C . --strip-components 1
|
||||
RUN mv /usr/src/app/docker /usr/bin/docker
|
||||
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
|
||||
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
|
||||
RUN chmod +x /usr/bin/envsubst /usr/bin/jq /usr/bin/docker
|
||||
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm
|
||||
COPY ./*package.json .
|
||||
RUN pnpm install
|
||||
COPY . .
|
||||
RUN pnpm build
|
||||
CMD ["pnpm", "start"]
|
||||
EXPOSE 3000
|
||||
@@ -1,10 +0,0 @@
|
||||
Some of the files are here for backwards compatibility.
|
||||
|
||||
I will do things after 2 months:
|
||||
|
||||
- rm ./install.js and ./update.js
|
||||
- rm ../install.sh
|
||||
- rm ./Dockerfile-base
|
||||
- rm ./obs
|
||||
- rm ./check.js "No need to check env file. During installation, it is checked by the installer. If you change it between to upgrades: 🤷♂️"
|
||||
- Rename Dockerfile-new to Dockerfile
|
||||
@@ -1,24 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const fastify = require('fastify')()
|
||||
const { schema } = require('../api/schema')
|
||||
|
||||
checkConfig().then(() => {
|
||||
console.log('Config: OK')
|
||||
}).catch((err) => {
|
||||
console.log('Config: NOT OK')
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
function checkConfig () {
|
||||
return new Promise((resolve, reject) => {
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
})
|
||||
.ready((err) => {
|
||||
if (err) reject(err)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
proxy:
|
||||
image: traefik:v2.4
|
||||
hostname: coollabs-proxy
|
||||
ports:
|
||||
- target: 80
|
||||
published: 80
|
||||
protocol: tcp
|
||||
mode: host
|
||||
- target: 443
|
||||
published: 443
|
||||
protocol: tcp
|
||||
mode: host
|
||||
- target: 8080
|
||||
published: 8080
|
||||
protocol: tcp
|
||||
mode: host
|
||||
command:
|
||||
- --api.insecure=true
|
||||
- --api.dashboard=true
|
||||
- --api.debug=true
|
||||
- --log.level=ERROR
|
||||
- --providers.docker=true
|
||||
- --providers.docker.swarmMode=true
|
||||
- --providers.docker.exposedbydefault=false
|
||||
- --providers.docker.network=${DOCKER_NETWORK}
|
||||
- --providers.docker.swarmModeRefreshSeconds=1s
|
||||
- --entrypoints.web.address=:80
|
||||
- --entrypoints.websecure.address=:443
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
networks:
|
||||
- ${DOCKER_NETWORK}
|
||||
deploy:
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
replicas: 1
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.api.entrypoints=websecure"
|
||||
- "traefik.http.routers.api.service=api@internal"
|
||||
- "traefik.http.routers.api.middlewares=auth"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=80"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=443"
|
||||
|
||||
# Global redirect www to non-www
|
||||
- "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
|
||||
- "traefik.http.routers.www-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"
|
||||
|
||||
# Global redirect http to https
|
||||
- "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
|
||||
- "traefik.http.routers.http-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.http-catchall.middlewares=redirect-to-https"
|
||||
|
||||
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.global-compress.compress=true"
|
||||
|
||||
networks:
|
||||
${DOCKER_NETWORK}:
|
||||
driver: overlay
|
||||
name: ${DOCKER_NETWORK}
|
||||
external: true
|
||||
|
||||
@@ -1,98 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
proxy:
|
||||
image: traefik:v2.4
|
||||
hostname: coollabs-proxy
|
||||
ports:
|
||||
- target: 80
|
||||
published: 80
|
||||
protocol: tcp
|
||||
mode: host
|
||||
- target: 443
|
||||
published: 443
|
||||
protocol: tcp
|
||||
mode: host
|
||||
command:
|
||||
- --api.insecure=false
|
||||
- --api.dashboard=false
|
||||
- --api.debug=false
|
||||
- --log.level=ERROR
|
||||
- --providers.docker=true
|
||||
- --providers.docker.swarmMode=true
|
||||
- --providers.docker.exposedbydefault=false
|
||||
- --providers.docker.network=${DOCKER_NETWORK}
|
||||
- --providers.docker.swarmModeRefreshSeconds=1s
|
||||
- --entrypoints.web.address=:80
|
||||
- --entrypoints.websecure.address=:443
|
||||
- --certificatesresolvers.letsencrypt.acme.httpchallenge=true
|
||||
- --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web
|
||||
- --certificatesresolvers.letsencrypt.acme.email=${EMAIL}
|
||||
- --certificatesresolvers.letsencrypt.acme.storage=/data/coolify/acme.json
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- /data/coolify:/data/coolify
|
||||
networks:
|
||||
- ${DOCKER_NETWORK}
|
||||
deploy:
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
replicas: 1
|
||||
placement:
|
||||
constraints:
|
||||
- node.role == manager
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.api.entrypoints=websecure"
|
||||
- "traefik.http.routers.api.service=api@internal"
|
||||
- "traefik.http.routers.api.middlewares=auth"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=80"
|
||||
- "traefik.http.services.traefik.loadbalancer.server.port=443"
|
||||
|
||||
# Global redirect www to non-www
|
||||
- "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
|
||||
- "traefik.http.routers.www-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
|
||||
- "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"
|
||||
|
||||
# Global redirect http to https
|
||||
- "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
|
||||
- "traefik.http.routers.http-catchall.entrypoints=web"
|
||||
- "traefik.http.routers.http-catchall.middlewares=redirect-to-https"
|
||||
|
||||
- "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
|
||||
- "traefik.http.middlewares.global-compress.compress=true"
|
||||
|
||||
coolify:
|
||||
image: coolify
|
||||
hostname: coollabs-coolify
|
||||
env_file:
|
||||
- .env
|
||||
networks:
|
||||
- ${DOCKER_NETWORK}
|
||||
command: "yarn start"
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
deploy:
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
replicas: 1
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.coolify.entrypoints=websecure"
|
||||
- "traefik.http.routers.coolify.tls.certresolver=letsencrypt"
|
||||
- "traefik.http.routers.coolify.rule=Host(`${DOMAIN}`) && PathPrefix(`/`)"
|
||||
- "traefik.http.services.coolify.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.coolify.middlewares=global-compress"
|
||||
|
||||
networks:
|
||||
${DOCKER_NETWORK}:
|
||||
driver: overlay
|
||||
name: ${DOCKER_NETWORK}
|
||||
external: true
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const { program } = require('commander')
|
||||
const fastify = require('fastify')()
|
||||
const { schema } = require('../api/schema')
|
||||
const shell = require('shelljs')
|
||||
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')
|
||||
|
||||
program.version('0.0.1')
|
||||
program
|
||||
.option('-d, --debug', 'Debug outputs.')
|
||||
.option('-c, --check', 'Only checks configuration.')
|
||||
.option('-t, --type <type>', 'Deploy type.')
|
||||
|
||||
program.parse(process.argv)
|
||||
|
||||
const options = program.opts()
|
||||
if (options.check) {
|
||||
checkConfig().then(() => {
|
||||
console.log('Config: OK')
|
||||
}).catch((err) => {
|
||||
console.log('Config: NOT OK')
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
} else {
|
||||
if (user !== 'root') {
|
||||
console.error(`Please run as root! Current user: ${user}`)
|
||||
process.exit(1)
|
||||
}
|
||||
shell.exec(`docker network create ${process.env.DOCKER_NETWORK} --driver overlay`, { silent: !options.debug })
|
||||
shell.exec('docker build -t coolify -f install/Dockerfile .')
|
||||
if (options.type === 'all') {
|
||||
shell.exec('docker stack rm coollabs-coolify', { silent: !options.debug })
|
||||
} else if (options.type === 'coolify') {
|
||||
shell.exec('docker service rm coollabs-coolify_coolify')
|
||||
} else if (options.type === 'proxy') {
|
||||
shell.exec('docker service rm coollabs-coolify_proxy')
|
||||
}
|
||||
if (options.type !== 'upgrade') {
|
||||
shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !options.debug, shell: '/bin/bash' })
|
||||
}
|
||||
}
|
||||
|
||||
function checkConfig () {
|
||||
return new Promise((resolve, reject) => {
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
})
|
||||
.ready((err) => {
|
||||
if (err) reject(err)
|
||||
resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
FROM coolify-base-nodejs
|
||||
WORKDIR /usr/src/app
|
||||
COPY . .
|
||||
RUN pnpm install
|
||||
@@ -1,6 +0,0 @@
|
||||
FROM node:lts
|
||||
LABEL coolify-preserve=true
|
||||
COPY --from=coolify-binaries /usr/bin/docker /usr/bin/docker
|
||||
COPY --from=coolify-binaries /usr/bin/envsubst /usr/bin/envsubst
|
||||
COPY --from=coolify-binaries /usr/bin/jq /usr/bin/jq
|
||||
RUN curl -f https://get.pnpm.io/v6.js | node - add --global pnpm@6
|
||||
@@ -1,9 +0,0 @@
|
||||
FROM ubuntu:20.04
|
||||
LABEL coolify-preserve=true
|
||||
RUN apt update && apt install -y curl gnupg2 ca-certificates
|
||||
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
|
||||
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
|
||||
RUN curl -L https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -o /usr/bin/jq
|
||||
RUN chmod +x /usr/bin/envsubst /usr/bin/jq
|
||||
RUN apt update && apt install -y docker-ce-cli && apt clean all
|
||||
@@ -1,21 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const { program } = require('commander')
|
||||
const shell = require('shelljs')
|
||||
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')
|
||||
program.version('0.0.1')
|
||||
program
|
||||
.option('-d, --debug', 'Debug outputs.')
|
||||
.option('-c, --check', 'Only checks configuration.')
|
||||
.option('-t, --type <type>', 'Deploy type.')
|
||||
|
||||
program.parse(process.argv)
|
||||
const options = program.opts()
|
||||
if (user !== 'root') {
|
||||
console.error(`Please run as root! Current user: ${user}`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (options.type === 'upgrade') {
|
||||
shell.exec('docker service rm coollabs-coolify_coolify')
|
||||
shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !options.debug, shell: '/bin/bash' })
|
||||
}
|
||||
157
package.json
157
package.json
@@ -1,68 +1,93 @@
|
||||
{
|
||||
"name": "coolify",
|
||||
"description": "An open-source, hassle-free, self-hostable Heroku & Netlify alternative.",
|
||||
"version": "1.0.8",
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"lint": "standard",
|
||||
"start": "NODE_ENV=production node api/server",
|
||||
"dev": "run-p dev:db dev:routify dev:svite dev:server",
|
||||
"dev:db": "NODE_ENV=development node api/development/mongodb.js",
|
||||
"dev:server": "nodemon -w api api/server",
|
||||
"dev:routify": "routify run",
|
||||
"dev:svite": "svite",
|
||||
"build": "run-s build:routify build:svite",
|
||||
"build:routify": "routify run -b",
|
||||
"build:svite": "svite build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@iarna/toml": "^2.2.5",
|
||||
"@roxi/routify": "^2.15.1",
|
||||
"@zerodevx/svelte-toast": "^0.2.1",
|
||||
"ajv": "^8.1.0",
|
||||
"axios": "^0.21.1",
|
||||
"commander": "^7.2.0",
|
||||
"compare-versions": "^3.6.0",
|
||||
"cuid": "^2.1.8",
|
||||
"dayjs": "^1.10.4",
|
||||
"deepmerge": "^4.2.2",
|
||||
"dockerode": "^3.2.1",
|
||||
"dotenv": "^8.2.0",
|
||||
"fastify": "^3.14.2",
|
||||
"fastify-env": "^2.1.0",
|
||||
"fastify-jwt": "^2.4.0",
|
||||
"fastify-plugin": "^3.0.0",
|
||||
"fastify-static": "^4.0.1",
|
||||
"generate-password": "^1.6.0",
|
||||
"http-errors-enhanced": "^0.7.0",
|
||||
"js-yaml": "^4.0.0",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"mongoose": "^5.12.3",
|
||||
"shelljs": "^0.8.4",
|
||||
"svelte-select": "^3.17.0",
|
||||
"unique-names-generator": "^4.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mongodb-memory-server-core": "^6.9.6",
|
||||
"nodemon": "^2.0.7",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"postcss": "^8.2.9",
|
||||
"postcss-import": "^14.0.1",
|
||||
"postcss-load-config": "^3.0.1",
|
||||
"postcss-preset-env": "^6.7.0",
|
||||
"prettier": "2.2.1",
|
||||
"prettier-plugin-svelte": "^2.2.0",
|
||||
"standard": "^16.0.3",
|
||||
"svelte": "^3.37.0",
|
||||
"svelte-hmr": "^0.14.0",
|
||||
"svelte-preprocess": "^4.7.0",
|
||||
"svite": "0.8.1",
|
||||
"tailwindcss": "2.1.1"
|
||||
},
|
||||
"keywords": [
|
||||
"svelte",
|
||||
"routify",
|
||||
"fastify",
|
||||
"tailwind"
|
||||
]
|
||||
"name": "coolify",
|
||||
"description": "An open-source & self-hostable Heroku / Netlify alternative.",
|
||||
"version": "2.6.1",
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"dev": "docker-compose -f docker-compose-dev.yaml up -d && cross-env NODE_ENV=development & svelte-kit dev --host 0.0.0.0",
|
||||
"dev:stop": "docker-compose -f docker-compose-dev.yaml down",
|
||||
"dev:logs": "docker-compose -f docker-compose-dev.yaml logs -f --tail 10",
|
||||
"studio": "npx prisma studio",
|
||||
"start": "npx prisma migrate deploy && npx prisma generate && npx prisma db seed && node build/index.js",
|
||||
"build": "svelte-kit build",
|
||||
"preview": "svelte-kit preview",
|
||||
"check": "svelte-check --tsconfig ./tsconfig.json",
|
||||
"check:watch": "svelte-check --tsconfig ./tsconfig.json --watch",
|
||||
"db:generate": "prisma generate",
|
||||
"db:push": "prisma db push && prisma generate",
|
||||
"db:seed": "prisma db seed",
|
||||
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
|
||||
"release:production:all": "cross-var docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
|
||||
"release:production:amd": "cross-var docker build --platform linux/amd64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
|
||||
"release:production:arm": "cross-var docker build --platform linux/arm64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
|
||||
"release:staging:all": "cross-var docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify:$npm_package_version --push .",
|
||||
"release:staging:amd": "cross-var docker build --platform linux/amd64 -t coollabsio/coolify:$npm_package_version --push .",
|
||||
"release:staging:arm": "cross-var docker build --platform linux/arm64 -t coollabsio/coolify:$npm_package_version --push .",
|
||||
"release:haproxy": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-alpine:latest -t coollabsio/coolify-haproxy-alpine:1.1.0 -f data/haproxy.Dockerfile --push .",
|
||||
"release:haproxy:tcp": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-tcp-alpine:latest -t coollabsio/coolify-haproxy-tcp-alpine:1.1.0 -f data/haproxy-tcp.Dockerfile --push .",
|
||||
"release:haproxy:http": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-http-alpine:latest -t coollabsio/coolify-haproxy-http-alpine:1.1.0 -f data/haproxy-http.Dockerfile --push .",
|
||||
"prepare": "husky install"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/adapter-node": "1.0.0-next.73",
|
||||
"@sveltejs/kit": "1.0.0-next.316",
|
||||
"@types/js-cookie": "3.0.1",
|
||||
"@types/js-yaml": "4.0.5",
|
||||
"@types/node": "17.0.25",
|
||||
"@types/node-forge": "1.0.1",
|
||||
"@typescript-eslint/eslint-plugin": "4.31.1",
|
||||
"@typescript-eslint/parser": "4.31.1",
|
||||
"@zerodevx/svelte-toast": "0.7.1",
|
||||
"autoprefixer": "10.4.4",
|
||||
"cross-env": "7.0.3",
|
||||
"cross-var": "1.1.0",
|
||||
"eslint": "7.32.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"eslint-plugin-svelte3": "3.4.1",
|
||||
"husky": "7.0.4",
|
||||
"lint-staged": "12.4.0",
|
||||
"postcss": "8.4.12",
|
||||
"prettier": "2.6.2",
|
||||
"prettier-plugin-svelte": "2.7.0",
|
||||
"prettier-plugin-tailwindcss": "0.1.10",
|
||||
"prisma": "3.11.1",
|
||||
"svelte": "3.47.0",
|
||||
"svelte-check": "2.7.0",
|
||||
"svelte-preprocess": "4.10.6",
|
||||
"svelte-select": "4.4.7",
|
||||
"sveltekit-i18n": "2.1.2",
|
||||
"tailwindcss": "3.0.24",
|
||||
"ts-node": "10.7.0",
|
||||
"tslib": "2.3.1",
|
||||
"typescript": "4.6.3"
|
||||
},
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@iarna/toml": "2.2.5",
|
||||
"@prisma/client": "3.11.1",
|
||||
"@sentry/node": "6.19.6",
|
||||
"bcryptjs": "2.4.3",
|
||||
"bullmq": "1.80.4",
|
||||
"compare-versions": "4.1.3",
|
||||
"cookie": "0.5.0",
|
||||
"cuid": "2.1.8",
|
||||
"dayjs": "1.11.1",
|
||||
"dockerode": "3.3.1",
|
||||
"dotenv-extended": "2.9.0",
|
||||
"generate-password": "1.7.0",
|
||||
"get-port": "6.1.2",
|
||||
"got": "12.0.3",
|
||||
"js-cookie": "3.0.1",
|
||||
"js-yaml": "4.1.0",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"mustache": "4.2.0",
|
||||
"node-forge": "1.3.1",
|
||||
"p-limit": "4.0.0",
|
||||
"svelte-kit-cookie-session": "2.1.3",
|
||||
"tailwindcss-scrollbar": "0.1.0",
|
||||
"unique-names-generator": "4.7.1"
|
||||
},
|
||||
"prisma": {
|
||||
"seed": "node prisma/seed.cjs"
|
||||
}
|
||||
}
|
||||
|
||||
9204
pnpm-lock.yaml
generated
9204
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
6
postcss.config.cjs
Normal file
6
postcss.config.cjs
Normal file
@@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {}
|
||||
}
|
||||
};
|
||||
@@ -1,7 +0,0 @@
|
||||
module.exports = {
|
||||
plugins: [
|
||||
require('postcss-import'),
|
||||
require('tailwindcss'),
|
||||
require('postcss-preset-env')({ stage: 1 })
|
||||
]
|
||||
}
|
||||
443
prisma/migrations/20220131142425_init/migration.sql
Normal file
443
prisma/migrations/20220131142425_init/migration.sql
Normal file
@@ -0,0 +1,443 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"password" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Permission" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"userId" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
|
||||
CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT,
|
||||
FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "TeamInvitation" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"uid" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"teamName" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Application" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"repository" TEXT,
|
||||
"configHash" TEXT,
|
||||
"branch" TEXT,
|
||||
"buildPack" TEXT,
|
||||
"projectId" INTEGER,
|
||||
"port" INTEGER,
|
||||
"installCommand" TEXT,
|
||||
"buildCommand" TEXT,
|
||||
"startCommand" TEXT,
|
||||
"baseDirectory" TEXT,
|
||||
"publishDirectory" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "BuildLog" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT,
|
||||
"buildId" TEXT NOT NULL,
|
||||
"line" TEXT NOT NULL,
|
||||
"time" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Build" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"type" TEXT NOT NULL,
|
||||
"applicationId" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"commit" TEXT,
|
||||
"status" TEXT DEFAULT 'queued',
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT NOT NULL,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GithubApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"appId" INTEGER,
|
||||
"installationId" INTEGER,
|
||||
"clientId" TEXT,
|
||||
"clientSecret" TEXT,
|
||||
"webhookSecret" TEXT,
|
||||
"privateKey" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitlabApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"oauthId" INTEGER NOT NULL,
|
||||
"groupName" TEXT,
|
||||
"deployKeyId" INTEGER,
|
||||
"privateSshKey" TEXT,
|
||||
"publicSshKey" TEXT,
|
||||
"webhookToken" TEXT,
|
||||
"appId" TEXT,
|
||||
"appSecret" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Database" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"defaultDatabase" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"dbUser" TEXT,
|
||||
"dbUserPassword" TEXT,
|
||||
"rootUser" TEXT,
|
||||
"rootUserPassword" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Minio" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"rootUser" TEXT NOT NULL,
|
||||
"rootUserPassword" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Vscodeserver" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"password" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_TeamToUser" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ApplicationToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitSourceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GithubAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitlabAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DestinationDockerToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DatabaseToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ServiceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
|
||||
28
prisma/migrations/20220210104005_redis_aol/migration.sql
Normal file
28
prisma/migrations/20220210104005_redis_aol/migration.sql
Normal file
@@ -0,0 +1,28 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT
|
||||
);
|
||||
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
|
||||
DROP TABLE "Team";
|
||||
ALTER TABLE "new_Team" RENAME TO "Team";
|
||||
CREATE TABLE "new_DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"appendOnly" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
|
||||
DROP TABLE "DatabaseSettings";
|
||||
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,11 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.
|
||||
|
||||
*/
|
||||
-- DropIndex
|
||||
DROP INDEX "Secret_name_key";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");
|
||||
-- prisma/migrations/20220217211304_dualcerts/migration.sql (new file, 47 lines)
|
||||
-- RedefineTables
-- SQLite cannot add columns with table rewrites via ALTER, so each table is
-- rebuilt (create new / copy rows / drop old / rename) to add "dualCerts".
-- FK enforcement is disabled while tables are dropped and renamed, and
-- re-enabled (after a violation check) at the end of this file.
PRAGMA foreign_keys=OFF;

-- Rebuild "Setting" with the new instance-wide "dualCerts" flag
-- (existing rows fall back to DEFAULT false — it is omitted from the INSERT).
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");

-- Rebuild "ApplicationSettings" with a per-application "dualCerts" flag.
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");

-- Rebuild "Service" with a per-service "dualCerts" flag. The FK keeps
-- ON DELETE SET NULL so removing a destination detaches (not deletes) services.
CREATE TABLE "new_Service" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "type" TEXT,
    "version" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
DROP TABLE "Service";
ALTER TABLE "new_Service" RENAME TO "Service";

-- Report any FK violations created by the rebuilds, then restore enforcement.
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
|
||||
-- NOTE: source diff was truncated here — some files were not shown because too many files changed.