Mirror of https://github.com/ershisan99/coolify.git (synced 2025-12-18 20:59:24 +00:00)

Compare commits: 939 commits
16  .devcontainer/Dockerfile (Normal file)
@@ -0,0 +1,16 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node/.devcontainer/base.Dockerfile

# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
ARG VARIANT="16-bullseye"
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}

# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
#     && apt-get -y install --no-install-recommends <your-package-list-here>

# [Optional] Uncomment if you want to install an additional version of node using nvm
# ARG EXTRA_NODE_VERSION=10
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"

# [Optional] Uncomment if you want to install more global node modules
RUN su node -c "npm install -g pnpm"
28  .devcontainer/devcontainer.json (Normal file)
@@ -0,0 +1,28 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node
{
	"name": "Node.js",
	"build": {
		"dockerfile": "Dockerfile",
		// Update 'VARIANT' to pick a Node version: 18, 16, 14.
		// Append -bullseye or -buster to pin to an OS version.
		// Use -bullseye variants on local arm64/Apple Silicon.
		"args": {
			"VARIANT": "16-bullseye"
		}
	},
	// Set *default* container specific settings.json values on container create.
	"settings": {},
	// Add the IDs of extensions you want installed when the container is created.
	"extensions": ["dbaeumer.vscode-eslint", "svelte.svelte-vscode"],
	// Use 'forwardPorts' to make a list of ports inside the container available locally.
	"forwardPorts": [3000],
	// Use 'postCreateCommand' to run commands after the container is created.
	"postCreateCommand": "cp .env.template .env && pnpm install && pnpm db:push && pnpm db:seed",
	// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
	"remoteUser": "node",
	"features": {
		"docker-in-docker": "20.10",
		"github-cli": "latest"
	}
}
@@ -1,3 +1,16 @@
.DS_Store
node_modules
dist
.routify
/build
/.svelte-kit
/package
/yarn.lock
/.pnpm-store
/ssl

.env
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
@@ -1,35 +1,8 @@
####################################
# Domain where your Coolify instance will be available and reachable.
# It's the same as you set in Github OAuth App and Github App as <domain>.
DOMAIN=
## Let's Encrypt contact email required
EMAIL=

# JWT Token Sign Key for logging you in to Coolify's frontend
JWT_SIGN_KEY=
# Encryption key for SECRETS - do NOT share it with others!
SECRETS_ENCRYPTION_KEY=

# Docker Engine
DOCKER_ENGINE=/var/run/docker.sock
# Docker network to use internally between the proxy and your apps
DOCKER_NETWORK=coollabs

# Mongodb
# Values in case if you are using our Mongodb installation - CHANGE user and password fields!
MONGODB_HOST=coollabs-mongodb
MONGODB_PORT=27017
MONGODB_USER=supercooldbuser
MONGODB_PASSWORD=developmentPassword4db
MONGODB_DB=coolLabs-prod

# Frontend only variables
VITE_GITHUB_APP_CLIENTID=
VITE_GITHUB_APP_NAME=

# Github OAuth & App secrets and private key - you can get it from Github.
GITHUB_APP_CLIENT_SECRET=
GITHUP_APP_WEBHOOK_SECRET=

# It should look like this. Newlines breaks with \n
GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA7Y+Uwkd8FINSwFktWGdtwCaOAazTDYR8ucEzGyR9r+ooJZhF\nOc32qgDSps6Q5DsqPOzvfhiviqU+et9VF+bJhfdzwJ+Le86QZH1RgsDMoY049XvI\nKSwP........"
COOLIFY_APP_ID=
COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db
COOLIFY_SENTRY_DSN=
COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=false
20  .eslintrc.cjs (Normal file)
@@ -0,0 +1,20 @@
module.exports = {
	root: true,
	parser: '@typescript-eslint/parser',
	extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
	plugins: ['svelte3', '@typescript-eslint'],
	ignorePatterns: ['*.cjs'],
	overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
	settings: {
		'svelte3/typescript': () => require('typescript')
	},
	parserOptions: {
		sourceType: 'module',
		ecmaVersion: 2020
	},
	env: {
		browser: true,
		es2017: true,
		node: true
	}
};
1  .github/FUNDING.yml (vendored, Normal file)
@@ -0,0 +1 @@
open_collective: coollabsio
47  .github/ISSUE_TEMPLATE/--bug-report.yaml (vendored, Normal file)
@@ -0,0 +1,47 @@
name: 🐞 Bug report
description: Create a bug report to help us improve coolify
title: "[Bug]: "
labels: [Bug]
assignees:
  - andrasbacsai
  - vasani-arpit
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report! Please fill the form in English
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Description
      description: A concise description of what you're experiencing and what you expect.
      placeholder: |
        When I do <X>, <Y> happens and I see the error message attached below:
        ```...```
        What I expect is <Z>
    validations:
      required: true
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Add steps to reproduce this behaviour, include console / network logs & videos
      placeholder: |
        1. Go to '...'
        2. Click on '....'
        3. Scroll down to '....'
        4. See error
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: "The version of your coolify Instance"
      placeholder: "2.5.2"
    validations:
      required: true
31  .github/ISSUE_TEMPLATE/--feature-request.yaml (vendored, Normal file)
@@ -0,0 +1,31 @@
name: 🛠️ Feature request
description: Suggest an idea to improve coolify
title: '[Feature]: '
labels: [Enhancement]
assignees:
  - andrasbacsai
  - vasani-arpit
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to request a feature for coolify! Please also add your request here to get feedback from the community: https://feedback.coolify.io/!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue related to this feature request already exists.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Summary
      description: One paragraph description of the feature.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Why should this be worked on?
      description: A concise description of the problems or use cases for this feature request.
    validations:
      required: true
20  .github/ISSUE_TEMPLATE/--task.yaml (vendored, Normal file)
@@ -0,0 +1,20 @@
name: 📝 Task
description: Create a task for the team to work on
title: "[Task]: "
labels: [Task]
body:
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue related to this already exists.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: SubTasks
      placeholder: |
        - Sub Task 1
        - Sub Task 2
    validations:
      required: false
8  .github/ISSUE_TEMPLATE/config.yml (vendored, Normal file)
@@ -0,0 +1,8 @@
blank_issues_enabled: true
contact_links:
  - name: 🤔 Questions and Help
    url: https://discord.com/invite/6rDM4fkymF
    about: Reach out to us on Discord or our GitHub discussions page.
  - name: 🙋‍♂️ Service request
    url: https://feedback.coolify.io/
    about: Want to request a new service? E.g. WordPress, Hasura, Appwrite, etc.
39  .github/workflows/github-actions.yml (vendored, Normal file)
@@ -0,0 +1,39 @@
name: release-coolify

on:
  release:
    types: published

jobs:
  make-it-coolifyed:
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      -
        name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Get current package version
        uses: martinbeentjes/npm-get-version-action@v1.2.3
        id: package-version
      -
        name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
          cache-from: type=registry,ref=coollabsio/coolify:buildcache
          cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
22  .gitignore (vendored)
@@ -1,10 +1,16 @@
.vscode
.idea
.DS_Store
node_modules
dist
dist-ssr
.routify
/build
/.svelte-kit
/package
/yarn.lock
/.pnpm-store
/ssl

.env
yarn-error.log
api/development/console.log
.pnpm-debug.log
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
1  .husky/_/.gitignore (vendored, Normal file)
@@ -0,0 +1 @@
*
4  .husky/pre-commit (Executable file)
@@ -0,0 +1,4 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

pnpm lint-staged
5  .lintstagedrc.json (Normal file)
@@ -0,0 +1,5 @@
{
	"**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}": [
		"prettier --ignore-path .gitignore --write --plugin-search-dir=."
	]
}
18  .prettierrc
@@ -1,14 +1,6 @@
{
	"arrowParens": "avoid",
	"bracketSpacing": true,
	"printWidth": 80,
	"semi": true,
	"singleQuote": false,
	"tabWidth": 2,
	"trailingComma": "all",
	"svelteSortOrder" : "styles-scripts-markup",
	"svelteStrictMode": true,
	"svelteBracketNewLine": true,
	"svelteAllowShorthand": true,
	"plugins": ["prettier-plugin-svelte"]
}
	"useTabs": true,
	"singleQuote": true,
	"trailingComma": "none",
	"printWidth": 100
}
11  .vscode/settings.json (vendored, Normal file)
@@ -0,0 +1,11 @@
{
	"i18n-ally.localesPaths": ["src/lib/locales"],
	"i18n-ally.keystyle": "nested",
	"i18n-ally.extract.ignoredByFiles": {
		"src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
	},
	"i18n-ally.sourceLanguage": "en",
	"i18n-ally.enabledFrameworks": ["svelte"],
	"i18n-ally.enabledParsers": ["js", "ts", "json"],
	"i18n-ally.extract.autoDetect": true
}
279  CONTRIBUTING.md (Normal file)
@@ -0,0 +1,279 @@
# 👋 Welcome

First of all, thank you for considering contributing to my project! It means a lot 💜.

## 🙋 Want to help?

If you are new to GitHub contributions, you can read the [first contributions](https://github.com/firstcontributions/first-contributions) guide and follow it.

Follow the [introduction](#introduction) to get started, then start contributing!

This is a short list of what you can do to help the project:

- [🧑💻 Develop your own ideas](#developer-contribution)
- [🌐 Translate the project](#translation)

## 👋 Introduction

### Setup with GitHub Codespaces

If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to start the dev environment. All the required dependencies and packages have been configured for you already.

### Setup locally on your machine

> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. 💡 Windows users can use GitHub Codespaces for development instead.

#### Recommended Pull Request Guideline

- Fork the project
- Clone your fork repo to local
- Create a new branch
- Push to your fork repo
- Create a pull request: https://github.com/coollabsio/compare
- Write a proper description
- Open the pull request to review against the `next` branch

---

# How to start after you set up your local fork?

Due to the lock file, this repository works best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!

You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.

#### Steps for local setup

1. Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
2. Install dependencies with `pnpm install`.
3. Create a local SQLite database with `pnpm db:push`.

   This will apply all migrations at `db/dev.db`.

4. Seed the database with base entities with `pnpm db:seed`.
5. You can start coding after starting `pnpm dev`.
## 🧑💻 Developer contribution

### Technical skills required

- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: Svelte / SvelteKit
- **Database ORM**: Prisma.io
- **Docker Engine**

### Database migrations

During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.

If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>`, where `nameOfMigration` is given by you. Make it descriptive. :)

### Tricky parts

- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!

---

# How to add new services

You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:

- Self-hostable (obviously)
- Open-source
- Maintained (I do not want to add software full of bugs)

## Backend

There are 5 steps you should take on the backend side.

1. Create the Prisma / database schema for the new service.
2. Add supported versions of the service.
3. Update global functions.
4. Create API endpoints.
5. Define automatically generated variables.

> I will use [Umami](https://umami.is/) as an example service.

### Create the Prisma / database schema for the new service

You only need to do this if you store passwords or any persistent configuration. It is required by most services, but there are some exceptions, like NocoDB.

Update the Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).

- Add a new model with the new service name.
- Make a relationship with the `Service` model.
- In the `Service` model, the name of the new field should start with a lowercase letter.
- If the service needs a database, define a `publicPort` field to be able to make its database public; an example field name in the case of PostgreSQL is `postgresqlPublicPort`. It should be an optional field.
If you are finished with the Prisma schema, you should update the database schema with the `pnpm db:push` command.

> You must restart the running development environment to be able to use the new model.

> If you use VSCode, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running VSCode.

### Add supported versions

Supported versions are hardcoded into Coolify (for now).

You need to update the `supportedServiceTypesAndVersions` function at [src/lib/components/common.ts](src/lib/components/common.ts). Example JSON:

```js
{
	// Name used to identify the service internally
	name: 'umami',
	// Fancier name to show to the user
	fancyName: 'Umami',
	// Docker base image for the service
	baseImage: 'ghcr.io/mikecao/umami',
	// Optional: If there is any dependent image, you should list it here
	images: [],
	// Usable tags
	versions: ['postgresql-latest'],
	// Which tag is the recommended
	recommendedVersion: 'postgresql-latest',
	// Application's default port, Umami listens on 3000
	ports: {
		main: 3000
	}
}
```

### Update global functions

1. Add the new service to the `include` variable in [src/lib/database/services.ts](src/lib/database/services.ts), so it will be included in all places in the database queries where it is required.

```js
const include: Prisma.ServiceInclude = {
	destinationDocker: true,
	persistentStorage: true,
	serviceSecret: true,
	minio: true,
	plausibleAnalytics: true,
	vscodeserver: true,
	wordpress: true,
	ghost: true,
	meiliSearch: true,
	umami: true // This line!
};
```

2. Update the database update query with the new service type in the `configureServiceType` function in [src/lib/database/services.ts](src/lib/database/services.ts). This function defines the automatically generated variables (passwords, users, etc.) and their encryption process (if applicable).

```js
[...]
else if (type === 'umami') {
	const postgresqlUser = cuid();
	const postgresqlPassword = encrypt(generatePassword());
	const postgresqlDatabase = 'umami';
	const hashSalt = encrypt(generatePassword(64));
	await prisma.service.update({
		where: { id },
		data: {
			type,
			umami: {
				create: {
					postgresqlDatabase,
					postgresqlPassword,
					postgresqlUser,
					hashSalt,
				}
			}
		}
	});
}
```

3. Add the decryption process for configurations and passwords to the `getService` function in [src/lib/database/services.ts](src/lib/database/services.ts).

```js
if (body.umami?.postgresqlPassword)
	body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);

if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
```

4. Add the service deletion query to the `removeService` function in [src/lib/database/services.ts](src/lib/database/services.ts), as sketched below.
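A minimal sketch of such a deletion query, assuming the relation added in step 1 is called `umami` and carries a `serviceId` foreign key; check the actual schema and the existing `removeService` code before copying:

```js
// Inside removeService({ id }) in src/lib/database/services.ts - illustrative only
await prisma.umami.deleteMany({ where: { serviceId: id } });
```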
### Create API endpoints

You need to add a new folder under [src/routes/services/[id]](src/routes/services/[id]) with the lowercase name of the service. You need 3 default files in that folder.

#### `index.json.ts`

It has a POST endpoint that updates the service details in Coolify's database, such as name, URL, and other configuration, like passwords. It should look something like this:

```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
	const { status, body } = await getUserDetails(event);
	if (status === 401) return { status, body };

	const { id } = event.params;

	let { name, fqdn } = await event.request.json();
	if (fqdn) fqdn = fqdn.toLowerCase();

	try {
		await db.updateService({ id, fqdn, name });
		return { status: 201 };
	} catch (error) {
		return ErrorHandler(error);
	}
};
```

If necessary, you can create your own database update function, specifically for the new service, as sketched below.
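A sketch of what such a helper could look like; the function and field names here (`updateUmamiService`, `umamiAdminPassword`) are illustrative, not existing code:

```js
// Illustrative helper in src/lib/database/services.ts; remember to encrypt secrets before storing them
export async function updateUmamiService({ id, fqdn, name, umamiAdminPassword }) {
	return await prisma.service.update({
		where: { id },
		data: { fqdn, name, umami: { update: { umamiAdminPassword: encrypt(umamiAdminPassword) } } }
	});
}
```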
#### `start.json.ts`

It has a POST endpoint that sets all the required secrets and persistent volumes, generates the `docker-compose.yaml` file, and sends a request to the specified Docker engine. A minimal skeleton is sketched below.

You could also define an `HTTP` or `TCP` proxy for every other port that should be proxied to your server. (See the `startHttpProxy` and `startTcpProxy` functions in [src/lib/haproxy/index.ts](src/lib/haproxy/index.ts).)
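A minimal sketch of the overall shape, reusing the helpers shown in `index.json.ts` above; the real start endpoints in the repository do considerably more, and the comments only outline the steps:

```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
	const { status, body } = await getUserDetails(event);
	if (status === 401) return { status, body };

	const { id } = event.params;

	try {
		// Illustrative; the real helper may take more arguments (e.g. teamId).
		const service = await db.getService({ id });
		// 1. Build the environment/secrets and persistent volume definitions for the service.
		// 2. Write a docker-compose.yaml describing the service (and its database, if any).
		// 3. Ask the destination Docker engine to bring the stack up.
		// 4. Optionally start HTTP/TCP proxies for extra ports (see startHttpProxy / startTcpProxy).
		return { status: 200 };
	} catch (error) {
		return ErrorHandler(error);
	}
};
```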
#### `stop.json.ts`

It has a POST endpoint that stops the service and all dependent (TCP/HTTP proxy) containers. If `publicPort` is specified, it also needs to clean it up from the database.

## Frontend

1. You need to add a custom logo at [src/lib/components/svg/services/](src/lib/components/svg/services/) as a Svelte component.

   SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.

2. You need to include the logo at

   - [src/routes/services/index.svelte](src/routes/services/index.svelte) with `isAbsolute` in two places,
   - [src/lib/components/ServiceLinks.svelte](src/lib/components/ServiceLinks.svelte) with `isAbsolute` and a link to the docs/main site of the service,
   - [src/routes/services/[id]/configuration/type.svelte](src/routes/services/[id]/configuration/type.svelte) with `isAbsolute`.

3. By default, the URL and the name frontend forms are included in [src/routes/services/[id]/\_Services/\_Services.svelte](src/routes/services/[id]/_Services/_Services.svelte).

   If you need to show more details on the frontend, such as users/passwords, you need to add a Svelte component to [src/routes/services/[id]/\_Services](src/routes/services/[id]/_Services) with an underscore. For examples, see the other files in that folder.

   You also need to add the new inputs to the `index.json.ts` file of the specific service, as done for MinIO here: [src/routes/services/[id]/minio/index.json.ts](src/routes/services/[id]/minio/index.json.ts)

## 🌐 Translate the project

The project uses [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) for translations.
It follows [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) for language codes.

### Installation

You must have gone through all the [introduction](#introduction) steps before you can start translating.

It's only advice, but I recommend you use:

- Visual Studio Code
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal to see the progress of the translation.
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get syntax highlighting for the project.

### Adding a language

If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the steps below:

1. In `src/lib/locales/`, copy-paste `en.json` and rename it with your language code (e.g. `cz.json`).
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO code of your language": "Language",` (e.g. `"cz": "Czech",`), as shown below.
3. Have fun translating!
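As an illustration, after adding Czech the top of `lang.json` would look roughly like this (the surrounding entries here are assumed, not copied from the real file):

```json
{
	"cz": "Czech",
	"en": "English"
}
```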
42  Dockerfile (Normal file)
@@ -0,0 +1,42 @@
FROM node:16.14.2-alpine as install
WORKDIR /app

RUN apk add --no-cache curl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm

COPY package*.json .
RUN pnpm install

FROM node:16.14.2-alpine
ARG TARGETPLATFORM

WORKDIR /app

ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
    PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
    PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
    PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
    PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
    PRISMA_CLIENT_ENGINE_TYPE=binary

COPY --from=coollabsio/prisma-engine:latest /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/

COPY --from=install /app/node_modules ./node_modules
COPY . .

RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker

RUN pnpm prisma generate
RUN pnpm build

EXPOSE 3000
CMD ["pnpm", "start"]
144  README.md
@@ -1,97 +1,115 @@
# About
# Coolify

https://andrasbacsai.com/farewell-netlify-and-heroku-after-3-days-of-coding
An open-source & self-hostable Heroku / Netlify alternative.

# Features
- Deploy your Node.js and static sites just by pushing code to git.
- Hassle-free installation and upgrade process.
- One-click MongoDB, MySQL, PostgreSQL, CouchDB deployments!
## Live Demo

# Upcoming features
- Backups & monitoring.
- User analytics with privacy in mind.
- And much more (see [Roadmap](https://github.com/coollabsio/coolify/projects/1)).
https://demo.coolify.io/

(If it is unresponsive, that means someone overloaded the server. 🙃)

# FAQ
Q: What does Buildpack mean?
## Feedback

A: It defines your application's final form. Static means that it will be hosted as a static site in the end. (see next question below 👇)
If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

---
## How to install

Q: How can I build a static site, like Next.js, Sapper (prerendered), etc.?
Installation is automated with the following command:

A: Use the `static` builder and set your `Build command`.
```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
```

# Screenshots
If you would like no questions during installation:

[Login](https://coollabs.io/coolify/login.jpg)
```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
```

[Applications](https://coollabs.io/coolify/applications.jpg)
For more details go to the [docs](https://docs.coollabs.io/coolify/installation).

[Databases](https://coollabs.io/coolify/databases.jpg)
## Features

[Configuration](https://coollabs.io/coolify/configuration.jpg)
### Git Sources

[Settings](https://coollabs.io/coolify/settings.jpg)
You can use the following Git sources to be auto-deployed to your Coolify instance! (Self-hosted versions are also supported.)

[Logs](https://coollabs.io/coolify/logs.jpg)
- Github
- GitLab
- Bitbucket (WIP)

# Getting Started
### Destinations

Automatically: `sh <(curl -fsSL https://get.coollabs.io/install.sh) coolify`
You can deploy your applications to the following destinations:

Manually:
### Requirements before installation
- [Docker](https://docs.docker.com/engine/install/) version 20+
- Docker in [swarm mode enabled](https://docs.docker.com/engine/reference/commandline/swarm_init/) (should be set manually before installation)
- A [MongoDB](https://docs.mongodb.com/manual/installation/) instance.
  - We have a [simple installation](https://github.com/coollabsio/infrastructure/tree/main/mongo) if you need one
- A configured DNS entry (see `.env.template`)
- [Github App](https://docs.github.com/en/developers/apps/creating-a-github-app)
- Local Docker Engine
- Remote Docker Engine (WIP)
- Kubernetes (WIP)

- GitHub App name: could be anything weird
- Homepage URL: https://yourdomain
### Applications

Identifying and authorizing users:
- Callback URL: https://yourdomain/api/v1/login/github/app
- Request user authorization (OAuth) during installation -> Check!
These are the predefined build packs, but with the Docker build pack, you can host anything that is hostable with a single Dockerfile.

Webhook:
- Active -> Check!
- Webhook URL: https://yourdomain/api/v1/webhooks/deploy
- Webhook Secret: it should be super secret
- Static sites
- NodeJS
- VueJS
- NuxtJS
- NextJS
- React/Preact
- Gatsby
- Svelte
- PHP
- Laravel
- Rust
- Docker
- Python
- Deno

Repository permissions:
- Contents: Read-only
- Metadata: Read-only

User permissions:
- Email: Read-only
### Databases

Subscribe to events:
- Push -> Check!
One-click databases are ready to be used internally or shared over the internet:

### Installation
- Clone this repository: `git clone git@github.com:coollabsio/coolify.git`
- Set `.env` (see `.env.template`)
- Installation: `bash install.sh all`
- MongoDB
- MariaDB
- MySQL
- PostgreSQL
- CouchDB
- Redis

## Manual updating process (You probably never need to do this!)
### Update everything (proxy+coolify)
- `bash install.sh all`
### One-click services

### Update coolify only
- `bash install.sh coolify`
You can host cool open-source services as well:

### Update proxy only
- `bash install.sh proxy`
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
- [LanguageTool](https://languagetool.org)
- [n8n](https://n8n.io)
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
- [Umami](https://github.com/mikecao/umami)
- [Fider](https://fider.io)
- [Hasura](https://hasura.io)

## Migration from v1

A fresh installation is necessary. v2 is not compatible with v1.

## Support

# Contact
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)

## Contribute

See [our contribution guide](./CONTRIBUTING.md).

## License

# License
This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Please see the [LICENSE](/LICENSE) file in our repository for the full text.
27  api/app.js
@@ -1,27 +0,0 @@
module.exports = async function (fastify, opts) {
	// Private routes
	fastify.register(async function (server) {
		if (process.env.NODE_ENV === 'production') server.register(require('./plugins/authentication'))
		server.register(require('./routes/v1/upgrade'), { prefix: '/upgrade' })
		server.register(require('./routes/v1/settings'), { prefix: '/settings' })
		server.register(require('./routes/v1/dashboard'), { prefix: '/dashboard' })
		server.register(require('./routes/v1/config'), { prefix: '/config' })
		server.register(require('./routes/v1/application/remove'), { prefix: '/application/remove' })
		server.register(require('./routes/v1/application/logs'), { prefix: '/application/logs' })
		server.register(require('./routes/v1/application/check'), { prefix: '/application/check' })
		server.register(require('./routes/v1/application/deploy'), { prefix: '/application/deploy' })
		server.register(require('./routes/v1/application/deploy/logs'), { prefix: '/application/deploy/logs' })
		server.register(require('./routes/v1/databases'), { prefix: '/databases' })
	})
	// Public routes
	fastify.register(require('./routes/v1/verify'), { prefix: '/verify' })
	fastify.register(require('./routes/v1/login/github'), {
		prefix: '/login/github'
	})
	fastify.register(require('./routes/v1/webhooks/deploy'), {
		prefix: '/webhooks/deploy'
	})
	fastify.register(require('./routes/v1/undead'), {
		prefix: '/undead'
	})
}
@@ -1,36 +0,0 @@
const mongoose = require('mongoose')
const { MongoMemoryServer } = require('mongodb-memory-server-core')

const mongoServer = new MongoMemoryServer({
	instance: {
		port: 27017,
		dbName: 'coolify',
		storageEngine: 'wiredTiger'
	},
	binary: {
		version: '4.4.3'
	}
})

mongoose.Promise = Promise
mongoServer.getUri().then((mongoUri) => {
	const mongooseOpts = {
		useNewUrlParser: true,
		useUnifiedTopology: true
	}

	mongoose.connect(mongoUri, mongooseOpts)

	mongoose.connection.on('error', (e) => {
		if (e.message.code === 'ETIMEDOUT') {
			console.log(e)
			mongoose.connect(mongoUri, mongooseOpts)
		}
		console.log(e)
	})

	mongoose.connection.once('open', () => {
		console.log(`Started in-memory mongodb ${mongoUri}`)
	})
})
@@ -1,34 +0,0 @@
const packs = require('../../../packs')
const { saveAppLog } = require('../../logging')
const Deployment = require('../../../models/Deployment')

module.exports = async function (configuration) {
	const { id, organization, name, branch } = configuration.repository
	const { domain } = configuration.publish
	const deployId = configuration.general.deployId

	const execute = packs[configuration.build.pack]
	if (execute) {
		try {
			await Deployment.findOneAndUpdate(
				{ repoId: id, branch, deployId, organization, name, domain },
				{ repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
			await saveAppLog('### Building application.', configuration)

			await execute(configuration)

			await saveAppLog('### Building done.', configuration)
		} catch (error) {
			await Deployment.findOneAndUpdate(
				{ repoId: id, branch, deployId, organization, name, domain },
				{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
			if (error.stack) throw { error: error.stack, type: 'server' }
			throw { error, type: 'app' }
		}
	} else {
		await Deployment.findOneAndUpdate(
			{ repoId: id, branch, deployId, organization, name, domain },
			{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
		throw { error: 'No buildpack found.', type: 'app' }
	}
}
@@ -1,41 +0,0 @@
const { docker } = require('../../docker')
const { execShellAsync, delay } = require('../../common')
const Deployment = require('../../../models/Deployment')

async function purgeOldThings () {
  try {
    await docker.engine.pruneImages()
    await docker.engine.pruneContainers()
  } catch (error) {
    throw { error, type: 'server' }
  }
}

async function cleanup (configuration) {
  const { id } = configuration.repository
  const deployId = configuration.general.deployId
  try {
    // Cleanup stucked deployments.
    const deployments = await Deployment.find({ repoId: id, deployId: { $ne: deployId }, progress: { $in: ['queued', 'inprogress'] } })
    for (const deployment of deployments) {
      await Deployment.findByIdAndUpdate(deployment._id, { $set: { progress: 'failed' } })
    }
  } catch (error) {
    throw { error, type: 'server' }
  }
}

async function deleteSameDeployments (configuration) {
  try {
    await (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(async s => {
      const running = JSON.parse(s.Spec.Labels.configuration)
      if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
        await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
      }
    })
  } catch (error) {
    throw { error, type: 'server' }
  }
}

module.exports = { cleanup, deleteSameDeployments, purgeOldThings }
@@ -1,62 +0,0 @@
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
const cuid = require('cuid')
const { execShellAsync } = require('../common')
const crypto = require('crypto')

function getUniq () {
  return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
}

function setDefaultConfiguration (configuration) {
  try {
    const nickname = getUniq()
    const deployId = cuid()

    const shaBase = JSON.stringify({ repository: configuration.repository })
    const sha256 = crypto.createHash('sha256').update(shaBase).digest('hex')

    configuration.build.container.name = sha256.slice(0, 15)

    configuration.general.nickname = nickname
    configuration.general.deployId = deployId
    configuration.general.workdir = `/tmp/${deployId}`

    if (!configuration.publish.path) configuration.publish.path = '/'
    if (!configuration.publish.port) configuration.publish.port = configuration.build.pack === 'static' ? 80 : 3000

    if (configuration.build.pack === 'static') {
      if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
      if (!configuration.build.directory) configuration.build.directory = '/'
    }

    if (configuration.build.pack === 'nodejs') {
      if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
      if (!configuration.build.directory) configuration.build.directory = '/'
    }

    return configuration
  } catch (error) {
    throw { error, type: 'server' }
  }
}

async function updateServiceLabels (configuration, services) {
  // In case of any failure during deployment, still update the current configuration.
  const found = services.find(s => {
    const config = JSON.parse(s.Spec.Labels.configuration)
    if (config.repository.id === configuration.repository.id && config.repository.branch === configuration.repository.branch) {
      return config
    }
    return null
  })
  if (found) {
    const { ID } = found
    try {
      const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
      execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
    } catch (error) {
      console.log(error)
    }
  }
}
module.exports = { setDefaultConfiguration, updateServiceLabels }
@@ -1,53 +0,0 @@
const fs = require('fs').promises
module.exports = async function (configuration) {
  try {
    // TODO: Do it better.
    await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
    await fs.writeFile(
      `${configuration.general.workdir}/nginx.conf`,
      `user nginx;
worker_processes auto;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
worker_connections 1024;
}

http {
include /etc/nginx/mime.types;

access_log off;
sendfile on;
#tcp_nopush on;
keepalive_timeout 65;

server {
listen 80;
server_name localhost;

location / {
root /usr/share/nginx/html;
index index.html;
try_files $uri $uri/index.html $uri/ /index.html =404;
}

error_page 404 /50x.html;

# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}

}

}
`
    )
  } catch (error) {
    throw { error, type: 'server' }
  }
}
@@ -1,97 +0,0 @@
const yaml = require('js-yaml')
const { execShellAsync } = require('../../common')
const { docker } = require('../../docker')
const { saveAppLog } = require('../../logging')
const { deleteSameDeployments } = require('../cleanup')
const fs = require('fs').promises

module.exports = async function (configuration, configChanged, imageChanged) {
  try {
    const generateEnvs = {}
    for (const secret of configuration.publish.secrets) {
      generateEnvs[secret.name] = secret.value
    }
    const containerName = configuration.build.container.name
    const stack = {
      version: '3.8',
      services: {
        [containerName]: {
          image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
          networks: [`${docker.network}`],
          environment: generateEnvs,
          deploy: {
            replicas: 1,
            restart_policy: {
              condition: 'on-failure',
              delay: '5s',
              max_attempts: 1,
              window: '120s'
            },
            update_config: {
              parallelism: 1,
              delay: '10s',
              order: 'start-first'
            },
            rollback_config: {
              parallelism: 1,
              delay: '10s',
              order: 'start-first'
            },
            labels: [
              'managedBy=coolify',
              'type=application',
              'configuration=' + JSON.stringify(configuration),
              'traefik.enable=true',
              'traefik.http.services.' +
                configuration.build.container.name +
                `.loadbalancer.server.port=${configuration.publish.port}`,
              'traefik.http.routers.' +
                configuration.build.container.name +
                '.entrypoints=websecure',
              'traefik.http.routers.' +
                configuration.build.container.name +
                '.rule=Host(`' +
                configuration.publish.domain +
                '`) && PathPrefix(`' +
                configuration.publish.path +
                '`)',
              'traefik.http.routers.' +
                configuration.build.container.name +
                '.tls.certresolver=letsencrypt',
              'traefik.http.routers.' +
                configuration.build.container.name +
                '.middlewares=global-compress'
            ]
          }
        }
      },
      networks: {
        [`${docker.network}`]: {
          external: true
        }
      }
    }
    await saveAppLog('### Publishing.', configuration)
    await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
    if (configChanged) {
      // console.log('configuration changed')
      await execShellAsync(
        `cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
      )
    } else if (imageChanged) {
      // console.log('image changed')
      await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
    } else {
      // console.log('new deployment or force deployment')
      await deleteSameDeployments(configuration)
      await execShellAsync(
        `cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
      )
    }

    await saveAppLog('### Published done!', configuration)
  } catch (error) {
    await saveAppLog(`Error occured during deployment: ${error.message}`, configuration)
    throw { error, type: 'server' }
  }
}
@@ -1,44 +0,0 @@
const jwt = require('jsonwebtoken')
const axios = require('axios')
const { execShellAsync, cleanupTmp } = require('../../common')

module.exports = async function (configuration) {
  const { workdir } = configuration.general
  const { organization, name, branch } = configuration.repository
  const github = configuration.github

  const githubPrivateKey = process.env.GITHUB_APP_PRIVATE_KEY.replace(/\\n/g, '\n').replace(/"/g, '')

  const payload = {
    iat: Math.round(new Date().getTime() / 1000),
    exp: Math.round(new Date().getTime() / 1000 + 60),
    iss: parseInt(github.app.id)
  }

  try {
    const jwtToken = jwt.sign(payload, githubPrivateKey, {
      algorithm: 'RS256'
    })
    const accessToken = await axios({
      method: 'POST',
      url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
      data: {},
      headers: {
        Authorization: 'Bearer ' + jwtToken,
        Accept: 'application/vnd.github.machine-man-preview+json'
      }
    })
    await execShellAsync(
      `mkdir -p ${workdir} && git clone -q -b ${branch} https://x-access-token:${accessToken.data.token}@github.com/${organization}/${name}.git ${workdir}/`
    )
    configuration.build.container.tag = (
      await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
    )
      .replace('\n', '')
      .slice(0, 7)
  } catch (error) {
    cleanupTmp(workdir)
    if (error.stack) console.log(error.stack)
    throw { error, type: 'server' }
  }
}
@@ -1,44 +0,0 @@
const dayjs = require('dayjs')

const { saveServerLog } = require('../logging')
const { cleanupTmp } = require('../common')

const { saveAppLog } = require('../logging')
const copyFiles = require('./deploy/copyFiles')
const buildContainer = require('./build/container')
const deploy = require('./deploy/deploy')
const Deployment = require('../../models/Deployment')
const { cleanup, purgeOldThings } = require('./cleanup')
const { updateServiceLabels } = require('./configuration')

async function queueAndBuild (configuration, services, configChanged, imageChanged) {
  const { id, organization, name, branch } = configuration.repository
  const { domain } = configuration.publish
  const { deployId, nickname, workdir } = configuration.general
  try {
    await new Deployment({
      repoId: id, branch, deployId, domain, organization, name, nickname
    }).save()
    await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
    await copyFiles(configuration)
    await buildContainer(configuration)
    await deploy(configuration, configChanged, imageChanged)
    await Deployment.findOneAndUpdate(
      { repoId: id, branch, deployId, organization, name, domain },
      { repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
    await updateServiceLabels(configuration, services)
    cleanupTmp(workdir)
    await purgeOldThings()
  } catch (error) {
    await cleanup(configuration)
    cleanupTmp(workdir)
    const { type } = error.error
    if (type === 'app') {
      await saveAppLog(error.error, configuration, true)
    } else {
      await saveServerLog({ event: error.error, configuration })
    }
  }
}

module.exports = { queueAndBuild }
@@ -1,94 +0,0 @@
const crypto = require('crypto')
const shell = require('shelljs')
const jsonwebtoken = require('jsonwebtoken')
const { docker } = require('./docker')
const User = require('../models/User')
const algorithm = 'aes-256-cbc'
const key = process.env.SECRETS_ENCRYPTION_KEY

function delay (t) {
  return new Promise(function (resolve) {
    setTimeout(function () {
      resolve('OK')
    }, t)
  })
}

async function verifyUserId (authorization) {
  const token = authorization.split(' ')[1]
  const verify = jsonwebtoken.verify(token, process.env.JWT_SIGN_KEY)
  const found = await User.findOne({ uid: verify.jti })
  if (found) {
    return true
  } else {
    return false
  }
}
function execShellAsync (cmd, opts = {}) {
  try {
    return new Promise(function (resolve, reject) {
      shell.config.silent = true
      shell.exec(cmd, opts, function (code, stdout, stderr) {
        if (code !== 0) return reject(new Error(stderr))
        return resolve(stdout)
      })
    })
  } catch (error) {
    return new Error('Oops')
  }
}
function cleanupTmp (dir) {
  if (dir !== '/') shell.rm('-fr', dir)
}

async function checkImageAvailable (name) {
  let cacheAvailable = false
  try {
    await docker.engine.getImage(name).get()
    cacheAvailable = true
  } catch (e) {
    // Cache image not found
  }
  return cacheAvailable
}

function encryptData (text) {
  const iv = crypto.randomBytes(16)
  const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
  let encrypted = cipher.update(text)
  encrypted = Buffer.concat([encrypted, cipher.final()])
  return { iv: iv.toString('hex'), encryptedData: encrypted.toString('hex') }
}

function decryptData (text) {
  const iv = Buffer.from(text.iv, 'hex')
  const encryptedText = Buffer.from(text.encryptedData, 'hex')
  const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
  let decrypted = decipher.update(encryptedText)
  decrypted = Buffer.concat([decrypted, decipher.final()])
  return decrypted.toString()
}

function createToken (payload) {
  const { uuid } = payload
  return jsonwebtoken.sign({}, process.env.JWT_SIGN_KEY, {
    expiresIn: 15778800,
    algorithm: 'HS256',
    audience: 'coolify',
    issuer: 'coolify',
    jwtid: uuid,
    subject: `User:${uuid}`,
    notBefore: -1000
  })
}

module.exports = {
  delay,
  createToken,
  execShellAsync,
  cleanupTmp,
  checkImageAvailable,
  encryptData,
  decryptData,
  verifyUserId
}
@@ -1,31 +0,0 @@
const Dockerode = require('dockerode')
const { saveAppLog } = require('./logging')

const docker = {
  engine: new Dockerode({
    socketPath: process.env.DOCKER_ENGINE
  }),
  network: process.env.DOCKER_NETWORK
}
async function streamEvents (stream, configuration) {
  try {
    await new Promise((resolve, reject) => {
      docker.engine.modem.followProgress(stream, onFinished, onProgress)
      function onFinished (err, res) {
        if (err) reject(err)
        resolve(res)
      }
      function onProgress (event) {
        if (event.error) {
          reject(event.error)
          return
        }
        saveAppLog(event.stream, configuration)
      }
    })
  } catch (error) {
    throw { error, type: 'app' }
  }
}

module.exports = { streamEvents, docker }
@@ -1,55 +0,0 @@
const ApplicationLog = require('../models/Logs/Application')
const ServerLog = require('../models/Logs/Server')
const dayjs = require('dayjs')

function generateTimestamp () {
  return `${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} `
}

async function saveAppLog (event, configuration, isError) {
  try {
    const deployId = configuration.general.deployId
    const repoId = configuration.repository.id
    const branch = configuration.repository.branch
    if (isError) {
      // console.log(event, config, isError)
      let clearedEvent = null

      if (event.error) clearedEvent = '[ERROR] ' + generateTimestamp() + event.error.replace(/(\r\n|\n|\r)/gm, '')
      else if (event) clearedEvent = '[ERROR] ' + generateTimestamp() + event.replace(/(\r\n|\n|\r)/gm, '')

      try {
        await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
      } catch (error) {
        console.log(error)
      }
    } else {
      if (event && event !== '\n') {
        const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(/(\r\n|\n|\r)/gm, '')
        try {
          await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
        } catch (error) {
          console.log(error)
        }
      }
    }
  } catch (error) {
    console.log(error)
    return error
  }
}

async function saveServerLog ({ event, configuration, type }) {
  if (configuration) {
    const deployId = configuration.general.deployId
    const repoId = configuration.repository.id
    const branch = configuration.repository.branch
    await new ApplicationLog({ repoId, branch, deployId, event: `[SERVER ERROR 😖]: ${event}` }).save()
  }
  await new ServerLog({ event, type }).save()
}

module.exports = {
  saveAppLog,
  saveServerLog
}
@@ -1,16 +0,0 @@
const mongoose = require('mongoose')
const deploymentSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    nickname: { type: String, required: true },
    repoId: { type: Number, required: true },
    organization: { type: String, required: true },
    name: { type: String, required: true },
    branch: { type: String, required: true },
    domain: { type: String, required: true },
    progress: { type: String, require: true, default: 'queued' }
  },
  { timestamps: true }
)

module.exports = mongoose.model('deployment', deploymentSchema)
@@ -1,10 +0,0 @@
const mongoose = require('mongoose')
const logSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    event: { type: String, required: true }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-application', logSchema)
@@ -1,13 +0,0 @@
const mongoose = require('mongoose')
const { version } = require('../../../package.json')
const logSchema = mongoose.Schema(
  {
    version: { type: String, required: true, default: version },
    type: { type: String, required: true, enum: ['API', 'UPGRADE-P-1', 'UPGRADE-P-2'], default: 'API' },
    event: { type: String, required: true },
    seen: { type: Boolean, required: true, default: false }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-server', logSchema)
@@ -1,11 +0,0 @@
const mongoose = require('mongoose')

const settingsSchema = mongoose.Schema(
  {
    applicationName: { type: String, required: true, default: 'coolify' },
    allowRegistration: { type: Boolean, required: true, default: false }
  },
  { timestamps: true }
)

module.exports = mongoose.model('settings', settingsSchema)
@@ -1,12 +0,0 @@
const mongoose = require('mongoose')

const userSchema = mongoose.Schema(
  {
    email: { type: String, required: true },
    avatar: { type: String },
    uid: { type: String, required: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('user', userSchema)
@@ -1,28 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../libs/docker')

async function buildImage (configuration) {
  let dockerFile = `
# build
FROM node:lts
WORKDIR /usr/src/app
COPY package*.json .
`
  if (configuration.build.command.installation) {
    dockerFile += `RUN ${configuration.build.command.installation}
`
  }
  dockerFile += `COPY . .
RUN ${configuration.build.command.build}`

  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, dockerFile)
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}

module.exports = {
  buildImage
}
@@ -1,4 +0,0 @@
const static = require('./static')
const nodejs = require('./nodejs')

module.exports = { static, nodejs }
@@ -1,32 +0,0 @@
const fs = require('fs').promises
const { buildImage } = require('../helpers')
const { streamEvents, docker } = require('../../libs/docker')

module.exports = async function (configuration) {
  if (configuration.build.command.build) await buildImage(configuration)

  let dockerFile = `# production stage
FROM node:lts
WORKDIR /usr/src/app
`
  if (configuration.build.command.build) {
    dockerFile += `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.build.directory} /usr/src/app`
  } else {
    dockerFile += 'COPY . ./'
  }
  if (configuration.build.command.installation) {
    dockerFile += `
RUN ${configuration.build.command.installation}
`
  }
  dockerFile += `
EXPOSE ${configuration.publish.port}
CMD [ "yarn", "start" ]`

  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, dockerFile)
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,28 +0,0 @@
const fs = require('fs').promises
const { buildImage } = require('../helpers')
const { streamEvents, docker } = require('../../libs/docker')

module.exports = async function (configuration) {
  if (configuration.build.command.build) await buildImage(configuration)

  let dockerFile = `# production stage
FROM nginx:stable-alpine
COPY nginx.conf /etc/nginx/nginx.conf
`
  if (configuration.build.command.build) {
    dockerFile += `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.build.directory} /usr/share/nginx/html`
  } else {
    dockerFile += 'COPY . /usr/share/nginx/html'
  }

  dockerFile += `
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]`
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, dockerFile)

  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,21 +0,0 @@
const fp = require('fastify-plugin')
const User = require('../models/User')
module.exports = fp(async function (fastify, options, next) {
  fastify.register(require('fastify-jwt'), {
    secret: fastify.config.JWT_SIGN_KEY
  })
  fastify.addHook('onRequest', async (request, reply) => {
    try {
      const { jti } = await request.jwtVerify()
      const found = await User.findOne({ uid: jti })
      if (found) {
        return true
      } else {
        reply.code(401).send('Unauthorized')
      }
    } catch (err) {
      reply.code(401).send('Unauthorized')
    }
  })
  next()
})
@@ -1,35 +0,0 @@
const { verifyUserId } = require('../../../libs/common')
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
const { docker } = require('../../../libs/docker')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    if (!await verifyUserId(request.headers.authorization)) {
      reply.code(500).send({ error: 'Invalid request' })
      return
    }
    const configuration = setDefaultConfiguration(request.body)

    const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
    let foundDomain = false

    for (const service of services) {
      const running = JSON.parse(service.Spec.Labels.configuration)
      if (running) {
        if (
          running.publish.domain === configuration.publish.domain &&
          running.repository.id !== configuration.repository.id
        ) {
          foundDomain = true
        }
      }
    }
    if (fastify.config.DOMAIN === configuration.publish.domain) foundDomain = true
    if (foundDomain) {
      reply.code(500).send({ message: 'Domain already in use.' })
      return
    }
    return { message: 'OK' }
  })
}
@@ -1,117 +0,0 @@
const { verifyUserId, cleanupTmp, execShellAsync } = require('../../../../libs/common')
const Deployment = require('../../../../models/Deployment')
const { queueAndBuild } = require('../../../../libs/applications')
const { setDefaultConfiguration } = require('../../../../libs/applications/configuration')
const { docker } = require('../../../../libs/docker')
const cloneRepository = require('../../../../libs/applications/github/cloneRepository')

module.exports = async function (fastify) {
  // const postSchema = {
  //   body: {
  //     type: "object",
  //     properties: {
  //       ref: { type: "string" },
  //       repository: {
  //         type: "object",
  //         properties: {
  //           id: { type: "number" },
  //           full_name: { type: "string" },
  //         },
  //         required: ["id", "full_name"],
  //       },
  //       installation: {
  //         type: "object",
  //         properties: {
  //           id: { type: "number" },
  //         },
  //         required: ["id"],
  //       },
  //     },
  //     required: ["ref", "repository", "installation"],
  //   },
  // };
  fastify.post('/', async (request, reply) => {
    if (!await verifyUserId(request.headers.authorization)) {
      reply.code(500).send({ error: 'Invalid request' })
      return
    }

    const configuration = setDefaultConfiguration(request.body)

    const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')

    await cloneRepository(configuration)

    let foundService = false
    let foundDomain = false
    let configChanged = false
    let imageChanged = false

    let forceUpdate = false

    for (const service of services) {
      const running = JSON.parse(service.Spec.Labels.configuration)
      if (running) {
        if (
          running.publish.domain === configuration.publish.domain &&
          running.repository.id !== configuration.repository.id
        ) {
          foundDomain = true
        }
        if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
          const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
          const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running')
          if (isError.length > 0) forceUpdate = true

          foundService = true
          const runningWithoutContainer = JSON.parse(JSON.stringify(running))
          delete runningWithoutContainer.build.container

          const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
          delete configurationWithoutContainer.build.container

          // If only the configuration changed
          if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
          // If only the image changed
          if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
          // If build pack changed, forceUpdate the service
          if (running.build.pack !== configuration.build.pack) forceUpdate = true
        }
      }
    }
    if (foundDomain) {
      cleanupTmp(configuration.general.workdir)
      reply.code(500).send({ message: 'Domain already in use.' })
      return
    }
    if (forceUpdate) {
      imageChanged = false
      configChanged = false
    } else {
      if (foundService && !imageChanged && !configChanged) {
        cleanupTmp(configuration.general.workdir)
        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
        return
      }
    }

    const alreadyQueued = await Deployment.find({
      repoId: configuration.repository.id,
      branch: configuration.repository.branch,
      organization: configuration.repository.organization,
      name: configuration.repository.name,
      domain: configuration.publish.domain,
      progress: { $in: ['queued', 'inprogress'] }
    })

    if (alreadyQueued.length > 0) {
      reply.code(200).send({ message: 'Already in the queue.' })
      return
    }

    queueAndBuild(configuration, services, configChanged, imageChanged)

    reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
  })
}
@@ -1,62 +0,0 @@
const ApplicationLog = require('../../../../models/Logs/Application')
const Deployment = require('../../../../models/Deployment')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const relativeTime = require('dayjs/plugin/relativeTime')
dayjs.extend(utc)
dayjs.extend(relativeTime)

module.exports = async function (fastify) {
  const getLogSchema = {
    querystring: {
      type: 'object',
      properties: {
        repoId: { type: 'string' },
        branch: { type: 'string' }
      },
      required: ['repoId', 'branch']
    }
  }
  fastify.get('/', { schema: getLogSchema }, async (request, reply) => {
    const { repoId, branch, page } = request.query
    const onePage = 5
    const show = Number(page) * onePage || 5
    const deploy = await Deployment.find({ repoId, branch })
      .select('-_id -__v -repoId')
      .sort({ createdAt: 'desc' })
      .limit(show)

    const finalLogs = deploy.map(d => {
      const finalLogs = { ...d._doc }

      const updatedAt = dayjs(d.updatedAt).utc()

      finalLogs.took = updatedAt.diff(dayjs(d.createdAt)) / 1000
      finalLogs.since = updatedAt.fromNow()

      return finalLogs
    })
    return finalLogs
  })

  fastify.get('/:deployId', async (request, reply) => {
    const { deployId } = request.params
    try {
      const logs = await ApplicationLog.find({ deployId })
        .select('-_id -__v')
        .sort({ createdAt: 'asc' })

      const deploy = await Deployment.findOne({ deployId })
        .select('-_id -__v')
        .sort({ createdAt: 'desc' })

      const finalLogs = {}
      finalLogs.progress = deploy.progress
      finalLogs.events = logs.map(log => log.event)
      finalLogs.human = dayjs(deploy.updatedAt).from(dayjs(deploy.updatedAt))
      return finalLogs
    } catch (e) {
      throw new Error('No logs found')
    }
  })
}
@@ -1,10 +0,0 @@
const { docker } = require('../../../libs/docker')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const { name } = request.query
    const service = await docker.engine.getService(`${name}_${name}`)
    const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
    return { logs }
  })
}
@@ -1,35 +0,0 @@
const { docker } = require('../../../libs/docker')
const { execShellAsync } = require('../../../libs/common')
const ApplicationLog = require('../../../models/Logs/Application')
const Deployment = require('../../../models/Deployment')

module.exports = async function (fastify) {
  fastify.post('/', async (request, reply) => {
    const { organization, name, branch } = request.body
    let found = false
    try {
      (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(s => {
        const running = JSON.parse(s.Spec.Labels.configuration)
        if (running.repository.organization === organization &&
          running.repository.name === name &&
          running.repository.branch === branch) {
          found = running
        }
        return null
      })
      if (found) {
        const deploys = await Deployment.find({ organization, branch, name })
        for (const deploy of deploys) {
          await ApplicationLog.deleteMany({ deployId: deploy.deployId })
          await Deployment.deleteMany({ deployId: deploy.deployId })
        }
        await execShellAsync(`docker stack rm ${found.build.container.name}`)
        reply.code(200).send({ organization, name, branch })
      } else {
        reply.code(500).send({ message: 'Nothing to do.' })
      }
    } catch (error) {
      reply.code(500).send({ message: 'Nothing to do.' })
    }
  })
}
@@ -1,103 +0,0 @@
const { docker } = require('../../libs/docker')

module.exports = async function (fastify) {
  // const getConfig = {
  //   querystring: {
  //     type: 'object',
  //     properties: {
  //       repoId: { type: 'number' },
  //       branch: { type: 'string' }
  //     },
  //     required: ['repoId', 'branch']
  //   }
  // }

  // const saveConfig = {
  //   body: {
  //     type: 'object',
  //     properties: {
  //       build: {
  //         type: 'object',
  //         properties: {
  //           baseDir: { type: 'string' },
  //           installCmd: { type: 'string' },
  //           buildCmd: { type: 'string' }
  //         },
  //         required: ['baseDir', 'installCmd', 'buildCmd']
  //       },
  //       publish: {
  //         type: 'object',
  //         properties: {
  //           publishDir: { type: 'string' },
  //           domain: { type: 'string' },
  //           pathPrefix: { type: 'string' },
  //           port: { type: 'number' }
  //         },
  //         required: ['publishDir', 'domain', 'pathPrefix', 'port']
  //       },
  //       previewDeploy: { type: 'boolean' },
  //       branch: { type: 'string' },
  //       repoId: { type: 'number' },
  //       buildPack: { type: 'string' },
  //       fullName: { type: 'string' },
  //       installationId: { type: 'number' }
  //     },
  //     required: ['build', 'publish', 'previewDeploy', 'branch', 'repoId', 'buildPack', 'fullName', 'installationId']
  //   }
  // }

  // fastify.get("/all", async (request, reply) => {
  //   return await Config.find().select("-_id -__v");
  // });

  // fastify.get("/", { schema: getConfig }, async (request, reply) => {
  //   const { repoId, branch } = request.query;
  //   return await Config.findOne({ repoId, branch }).select("-_id -__v");
  // });

  fastify.post('/', async (request, reply) => {
    const { name, organization, branch } = request.body
    const services = await docker.engine.listServices()
    const applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')

    const found = applications.find(r => {
      const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
      if (branch) {
        if (configuration.repository.name === name && configuration.repository.organization === organization && configuration.repository.branch === branch) {
          return r
        }
      } else {
        if (configuration.repository.name === name && configuration.repository.organization === organization) {
          return r
        }
      }
      return null
    })
    if (found) {
      return JSON.parse(found.Spec.Labels.configuration)
    } else {
      reply.code(500).send({ message: 'No configuration found.' })
    }
  })

  // fastify.delete("/", async (request, reply) => {
  //   const { repoId, branch } = request.body;

  //   const deploys = await Deployment.find({ repoId, branch })
  //   const found = deploys.filter(d => d.progress !== 'done' && d.progress !== 'failed')
  //   if (found.length > 0) {
  //     throw new Error('Deployment inprogress, cannot delete now.');
  //   }

  //   const config = await Config.findOneAndDelete({ repoId, branch })
  //   for (const deploy of deploys) {
  //     await ApplicationLog.findOneAndRemove({ deployId: deploy.deployId });
  //   }
  //   const secrets = await Secret.find({ repoId, branch });
  //   for (const secret of secrets) {
  //     await Secret.findByIdAndRemove(secret._id);
  //   }
  //   await execShellAsync(`docker stack rm ${config.containerName}`);
  //   return { message: 'Deleted application and related configurations.' };
  // });
}
@@ -1,55 +0,0 @@
const { docker } = require('../../../libs/docker')
const Deployment = require('../../../models/Deployment')
const ServerLog = require('../../../models/Logs/Server')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const latestDeployments = await Deployment.aggregate([
      {
        $sort: { createdAt: -1 }
      },
      {
        $group:
        {
          _id: {
            repoId: '$repoId',
            branch: '$branch'
          },
          createdAt: { $last: '$createdAt' },
          progress: { $first: '$progress' }
        }
      }
    ])

    const serverLogs = await ServerLog.find()
    const services = await docker.engine.listServices()

    let applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application' && r.Spec.Labels.configuration)
    let databases = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && r.Spec.Labels.configuration)
    applications = applications.map(r => {
      if (JSON.parse(r.Spec.Labels.configuration)) {
        const configuration = JSON.parse(r.Spec.Labels.configuration)
        const status = latestDeployments.find(l => configuration.repository.id === l._id.repoId && configuration.repository.branch === l._id.branch)
        if (status && status.progress) r.progress = status.progress
        r.Spec.Labels.configuration = configuration
        return r
      }
      return {}
    })
    databases = databases.map(r => {
      const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
      r.Spec.Labels.configuration = configuration
      return r
    })
    applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain, item])).values()]
    return {
      serverLogs,
      applications: {
        deployed: applications
      },
      databases: {
        deployed: databases
      }
    }
  })
}
@@ -1,173 +0,0 @@
const yaml = require('js-yaml')
const fs = require('fs').promises
const cuid = require('cuid')
const { docker } = require('../../../libs/docker')
const { execShellAsync } = require('../../../libs/common')

const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
const generator = require('generate-password')

function getUniq () {
  return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
}
module.exports = async function (fastify) {
  fastify.get('/:deployId', async (request, reply) => {
    const { deployId } = request.params
    try {
      const database = (await docker.engine.listServices()).find(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && JSON.parse(r.Spec.Labels.configuration).general.deployId === deployId)
      if (database) {
        const jsonEnvs = {}
        for (const d of database.Spec.TaskTemplate.ContainerSpec.Env) {
          const s = d.split('=')
          jsonEnvs[s[0]] = s[1]
        }
        const payload = {
          config: JSON.parse(database.Spec.Labels.configuration),
          envs: jsonEnvs
        }
        reply.code(200).send(payload)
      } else {
        throw new Error()
      }
    } catch (error) {
      throw new Error('No database found?')
    }
  })

  const postSchema = {
    body: {
      type: 'object',
      properties: {
        type: { type: 'string', enum: ['mongodb', 'postgresql', 'mysql', 'couchdb'] }
      },
      required: ['type']
    }
  }

  fastify.post('/deploy', { schema: postSchema }, async (request, reply) => {
    let { type, defaultDatabaseName } = request.body
    const passwords = generator.generateMultiple(2, {
      length: 24,
      numbers: true,
      strict: true
    })
    const usernames = generator.generateMultiple(2, {
      length: 10,
      numbers: true,
      strict: true
    })
    // TODO: Query for existing db with the same name
    const nickname = getUniq()

    if (!defaultDatabaseName) defaultDatabaseName = nickname

    reply.code(201).send({ message: 'Deploying.' })
    // TODO: Persistent volume, custom inputs
    const deployId = cuid()
    const configuration = {
      general: {
        workdir: `/tmp/${deployId}`,
        deployId,
        nickname,
        type
      },
      database: {
        usernames,
        passwords,
        defaultDatabaseName
      },
      deploy: {
        name: nickname
      }
    }
    let generateEnvs = {}
    let image = null
    let volume = null
    if (type === 'mongodb') {
      generateEnvs = {
        MONGODB_ROOT_PASSWORD: passwords[0],
        MONGODB_USERNAME: usernames[0],
        MONGODB_PASSWORD: passwords[1],
        MONGODB_DATABASE: defaultDatabaseName
      }
      image = 'bitnami/mongodb:4.4'
      volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`
    } else if (type === 'postgresql') {
      generateEnvs = {
        POSTGRESQL_PASSWORD: passwords[0],
        POSTGRESQL_USERNAME: usernames[0],
        POSTGRESQL_DATABASE: defaultDatabaseName
      }
      image = 'bitnami/postgresql:13.2.0'
      volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`
    } else if (type === 'couchdb') {
      generateEnvs = {
        COUCHDB_PASSWORD: passwords[0],
        COUCHDB_USER: usernames[0]
      }
      image = 'bitnami/couchdb:3'
      volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`
    } else if (type === 'mysql') {
      generateEnvs = {
        MYSQL_ROOT_PASSWORD: passwords[0],
        MYSQL_ROOT_USER: usernames[0],
        MYSQL_USER: usernames[1],
        MYSQL_PASSWORD: passwords[1],
        MYSQL_DATABASE: defaultDatabaseName
      }
      image = 'bitnami/mysql:8.0'
      volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`
    }

    const stack = {
      version: '3.8',
      services: {
        [configuration.general.deployId]: {
          image,
          networks: [`${docker.network}`],
          environment: generateEnvs,
          volumes: [volume],
          deploy: {
            replicas: 1,
            update_config: {
              parallelism: 0,
              delay: '10s',
              order: 'start-first'
            },
            rollback_config: {
              parallelism: 0,
              delay: '10s',
              order: 'start-first'
            },
            labels: [
              'managedBy=coolify',
              'type=database',
              'configuration=' + JSON.stringify(configuration)
            ]
          }
        }
      },
      networks: {
        [`${docker.network}`]: {
          external: true
        }
      },
      volumes: {
        [`${configuration.general.deployId}-${type}-data`]: {
          external: true
        }
      }
    }
    await execShellAsync(`mkdir -p ${configuration.general.workdir}`)
    await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
    await execShellAsync(
      `cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
    )
  })

  fastify.delete('/:dbName', async (request, reply) => {
    const { dbName } = request.params
    await execShellAsync(`docker stack rm ${dbName}`)
    reply.code(200).send({})
  })
}
@@ -1,121 +0,0 @@
const axios = require('axios')
const User = require('../../../models/User')
const Settings = require('../../../models/Settings')
const cuid = require('cuid')
const mongoose = require('mongoose')
const jwt = require('jsonwebtoken')
module.exports = async function (fastify) {
  const githubCodeSchema = {
    schema: {
      querystring: {
        type: 'object',
        properties: {
          code: { type: 'string' }
        },
        required: ['code']
      }
    }
  }
  fastify.get('/app', { schema: githubCodeSchema }, async (request, reply) => {
    const { code } = request.query
    try {
      const { data } = await axios({
        method: 'post',
        url: `https://github.com/login/oauth/access_token?client_id=${fastify.config.VITE_GITHUB_APP_CLIENTID}&client_secret=${fastify.config.GITHUB_APP_CLIENT_SECRET}&code=${code}`,
        headers: {
          accept: 'application/json'
        }
      })

      const token = data.access_token
      const githubAxios = axios.create({
        baseURL: 'https://api.github.com'
      })

      githubAxios.defaults.headers.common.Accept = 'Application/json'
      githubAxios.defaults.headers.common.Authorization = `token ${token}`

      try {
        let uid = cuid()
        const { avatar_url } = (await githubAxios.get('/user')).data // eslint-disable-line
        const email = (await githubAxios.get('/user/emails')).data.filter(
          (e) => e.primary
        )[0].email
        const settings = await Settings.findOne({ applicationName: 'coolify' })
        const registeredUsers = await User.find().countDocuments()
        const foundUser = await User.findOne({ email })
        if (foundUser) {
          await User.findOneAndUpdate(
            { email },
            { avatar: avatar_url },
            { upsert: true, new: true }
          )
          uid = foundUser.uid
        } else {
          if (registeredUsers === 0) {
            const newUser = new User({
              _id: new mongoose.Types.ObjectId(),
              email,
              avatar: avatar_url,
              uid
            })
            try {
              await newUser.save()
            } catch (e) {
              console.log(e)
              reply.code(500).send({ success: false, error: e })
              return
            }
          } else {
            if (!settings && registeredUsers > 0) {
              reply.code(500).send('Registration disabled, enable it in settings.')
            } else {
              if (!settings.allowRegistration) {
                reply.code(500).send('You are not allowed here!')
              } else {
                const newUser = new User({
                  _id: new mongoose.Types.ObjectId(),
                  email,
                  avatar: avatar_url,
                  uid
                })
                try {
                  await newUser.save()
                } catch (e) {
                  console.log(e)
                  reply.code(500).send({ success: false, error: e })
                  return
                }
              }
            }
          }
        }
        const jwtToken = jwt.sign({}, fastify.config.JWT_SIGN_KEY, {
          expiresIn: 15778800,
          algorithm: 'HS256',
          audience: 'coolLabs',
          issuer: 'coolLabs',
          jwtid: uid,
          subject: `User:${uid}`,
          notBefore: -1000
        })
        reply
          .code(200)
          .redirect(
            302,
            `/api/v1/login/github/success?jwtToken=${jwtToken}&ghToken=${token}`
          )
      } catch (e) {
        console.log(e)
        reply.code(500).send({ success: false, error: e })
        return
      }
    } catch (error) {
      console.log(error)
      reply.code(500).send({ success: false, error: error.message })
    }
  })
  fastify.get('/success', async (request, reply) => {
    return reply.sendFile('bye.html')
  })
}
@@ -1,44 +0,0 @@
const Settings = require('../../../models/Settings')
module.exports = async function (fastify) {
  const applicationName = 'coolify'
  const postSchema = {
    body: {
      type: 'object',
      properties: {
        allowRegistration: { type: 'boolean' }
      },
      required: ['allowRegistration']
    }
  }

  fastify.get('/', async (request, reply) => {
    try {
      let settings = await Settings.findOne({ applicationName }).select('-_id -__v')
      // TODO: Should do better
      if (!settings) {
        settings = {
          applicationName,
          allowRegistration: false
        }
      }
      return {
        settings
      }
    } catch (error) {
      throw new Error(error)
    }
  })

  fastify.post('/', { schema: postSchema }, async (request, reply) => {
    try {
      const settings = await Settings.findOneAndUpdate(
        { applicationName },
        { applicationName, ...request.body },
        { upsert: true, new: true }
      ).select('-_id -__v')
      reply.code(201).send({ settings })
    } catch (error) {
      throw new Error(error)
    }
  })
}
@@ -1,5 +0,0 @@
module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    reply.code(200).send('NO')
  })
}
@@ -1,12 +0,0 @@
const { execShellAsync } = require('../../../libs/common')
const { saveServerLog } = require('../../../libs/logging')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const upgradeP1 = await execShellAsync('bash ./install.sh upgrade-phase-1')
    await saveServerLog({ event: upgradeP1, type: 'UPGRADE-P-1' })
    reply.code(200).send('I\'m trying, okay?')
    const upgradeP2 = await execShellAsync('bash ./install.sh upgrade-phase-2')
    await saveServerLog({ event: upgradeP2, type: 'UPGRADE-P-2' })
  })
}
@@ -1,16 +0,0 @@
const User = require('../../models/User')
const jwt = require('jsonwebtoken')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const { authorization } = request.headers
    if (!authorization) {
      reply.code(401).send({})
      return
    }
    const token = authorization.split(' ')[1]
    const verify = jwt.verify(token, fastify.config.JWT_SIGN_KEY)
    const found = await User.findOne({ uid: verify.jti })
    found ? reply.code(200).send({}) : reply.code(401).send({})
  })
}
@@ -1,142 +0,0 @@
const crypto = require('crypto')
const { cleanupTmp, execShellAsync } = require('../../../libs/common')
const Deployment = require('../../../models/Deployment')
const { queueAndBuild } = require('../../../libs/applications')
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
const { docker } = require('../../../libs/docker')
const cloneRepository = require('../../../libs/applications/github/cloneRepository')

module.exports = async function (fastify) {
  // TODO: Add this to fastify plugin
  const postSchema = {
    body: {
      type: 'object',
      properties: {
        ref: { type: 'string' },
        repository: {
          type: 'object',
          properties: {
            id: { type: 'number' },
            full_name: { type: 'string' }
          },
          required: ['id', 'full_name']
        },
        installation: {
          type: 'object',
          properties: {
            id: { type: 'number' }
          },
          required: ['id']
        }
      },
      required: ['ref', 'repository', 'installation']
    }
  }
  fastify.post('/', { schema: postSchema }, async (request, reply) => {
    const hmac = crypto.createHmac('sha256', fastify.config.GITHUP_APP_WEBHOOK_SECRET)
    const digest = Buffer.from('sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'), 'utf8')
    const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8')
    if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
      reply.code(500).send({ error: 'Invalid request' })
      return
    }

    if (request.headers['x-github-event'] !== 'push') {
      reply.code(500).send({ error: 'Not a push event.' })
      return
    }

    const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')

    let configuration = services.find(r => {
      if (request.body.ref.startsWith('refs')) {
        const branch = request.body.ref.split('/')[2]
        if (
          JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
          JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
        ) {
          return r
        }
      }

      return null
    })

    if (!configuration) {
      reply.code(500).send({ error: 'No configuration found.' })
      return
    }

    configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration))

    await cloneRepository(configuration)

    let foundService = false
    let foundDomain = false
    let configChanged = false
    let imageChanged = false

    let forceUpdate = false

    for (const service of services) {
      const running = JSON.parse(service.Spec.Labels.configuration)
      if (running) {
        if (
          running.publish.domain === configuration.publish.domain &&
          running.repository.id !== configuration.repository.id &&
          running.repository.branch !== configuration.repository.branch
        ) {
          foundDomain = true
        }
        if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
          const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
          const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running')
          if (isError.length > 0) forceUpdate = true
          foundService = true

          const runningWithoutContainer = JSON.parse(JSON.stringify(running))
          delete runningWithoutContainer.build.container

          const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
          delete configurationWithoutContainer.build.container

          if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
          if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
        }
      }
    }
    if (foundDomain) {
      cleanupTmp(configuration.general.workdir)
      reply.code(500).send({ message: 'Domain already used.' })
      return
    }
    if (forceUpdate) {
      imageChanged = false
      configChanged = false
    } else {
      if (foundService && !imageChanged && !configChanged) {
        cleanupTmp(configuration.general.workdir)
        reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
        return
      }
    }

    const alreadyQueued = await Deployment.find({
      repoId: configuration.repository.id,
      branch: configuration.repository.branch,
      organization: configuration.repository.organization,
      name: configuration.repository.name,
      domain: configuration.publish.domain,
      progress: { $in: ['queued', 'inprogress'] }
    })

    if (alreadyQueued.length > 0) {
      reply.code(200).send({ message: 'Already in the queue.' })
      return
    }

    queueAndBuild(configuration, services, configChanged, imageChanged)

    reply.code(201).send({ message: 'Deployment queued.' })
  })
}
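The handler above authenticates GitHub webhooks by comparing an HMAC-SHA256 digest of the payload against the `x-hub-signature-256` header with a constant-time comparison. A self-contained sketch of the same check (function name is illustrative):

```js
// Stand-alone illustration of the signature check used in the route above.
const crypto = require('crypto')

function isValidGithubSignature (payload, signatureHeader, secret) {
  const digest = Buffer.from(
    'sha256=' + crypto.createHmac('sha256', secret).update(payload).digest('hex'),
    'utf8'
  )
  const checksum = Buffer.from(signatureHeader || '', 'utf8')
  // timingSafeEqual throws on length mismatch, so compare lengths first.
  return checksum.length === digest.length && crypto.timingSafeEqual(digest, checksum)
}
```

One caveat worth noting about the original: hashing `JSON.stringify(request.body)` can differ byte-for-byte from the raw payload GitHub signed, so signing the raw request body is generally the safer choice.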
@@ -1,49 +0,0 @@
const schema = {
  type: 'object',
  required: [
    'DOMAIN',
    'EMAIL',
    'VITE_GITHUB_APP_CLIENTID',
    'GITHUB_APP_CLIENT_SECRET',
    'GITHUB_APP_PRIVATE_KEY',
    'GITHUP_APP_WEBHOOK_SECRET',
    'JWT_SIGN_KEY',
    'SECRETS_ENCRYPTION_KEY'
  ],
  properties: {
    DOMAIN: {
      type: 'string'
    },
    EMAIL: {
      type: 'string'
    },
    VITE_GITHUB_APP_CLIENTID: {
      type: 'string'
    },
    GITHUB_APP_CLIENT_SECRET: {
      type: 'string'
    },
    GITHUB_APP_PRIVATE_KEY: {
      type: 'string'
    },
    GITHUP_APP_WEBHOOK_SECRET: {
      type: 'string'
    },
    JWT_SIGN_KEY: {
      type: 'string'
    },
    DOCKER_ENGINE: {
      type: 'string',
      default: '/var/run/docker.sock'
    },
    DOCKER_NETWORK: {
      type: 'string',
      default: 'coollabs'
    },
    SECRETS_ENCRYPTION_KEY: {
      type: 'string'
    }
  }
}

module.exports = { schema }
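This schema is consumed by `fastify-env`, which validates `process.env` at boot, applies the defaults, and exposes the result on the Fastify instance. A minimal usage sketch (the config key defaults to `config`, which matches the `fastify.config.*` accesses elsewhere in this diff):

```js
// Minimal usage sketch for the schema above with fastify-env.
const fastify = require('fastify')()
const { schema } = require('./schema')

fastify.register(require('fastify-env'), { schema, dotenv: true })

fastify.ready((err) => {
  if (err) throw err // missing required variables fail startup here
  // Validated values (with defaults applied) are available on fastify.config,
  // e.g. fastify.config.DOCKER_NETWORK === 'coollabs' when it is unset in .env.
  console.log(fastify.config.DOMAIN)
})
```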
@@ -1,90 +0,0 @@
require('dotenv').config()
const fs = require('fs')
const util = require('util')
const { saveServerLog } = require('./libs/logging')
const Deployment = require('./models/Deployment')
const fastify = require('fastify')({
  logger: { level: 'error' }
})
const mongoose = require('mongoose')
const path = require('path')
const { schema } = require('./schema')

fastify.register(require('fastify-env'), {
  schema,
  dotenv: true
})

if (process.env.NODE_ENV === 'production') {
  fastify.register(require('fastify-static'), {
    root: path.join(__dirname, '../dist/')
  })

  fastify.setNotFoundHandler(function (request, reply) {
    reply.sendFile('index.html')
  })
} else {
  fastify.register(require('fastify-static'), {
    root: path.join(__dirname, '../public/')
  })
}

fastify.register(require('./app'), { prefix: '/api/v1' })
fastify.setErrorHandler(async (error, request, reply) => {
  console.log(error)
  if (error.statusCode) {
    reply.status(error.statusCode).send({ message: error.message || 'Something is NOT okay. Are you okay?' })
  } else {
    reply.status(500).send({ message: error.message || 'Something is NOT okay. Are you okay?' })
  }
  await saveServerLog({ event: error })
})

if (process.env.NODE_ENV === 'production') {
  mongoose.connect(
    `mongodb://${process.env.MONGODB_USER}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DB}?authSource=${process.env.MONGODB_DB}&readPreference=primary&ssl=false`,
    { useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
  )
} else {
  mongoose.connect(
    'mongodb://localhost:27017/coolify?&readPreference=primary&ssl=false',
    { useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
  )
}

mongoose.connection.on(
  'error',
  console.error.bind(console, 'connection error:')
)
mongoose.connection.once('open', async function () {
  if (process.env.NODE_ENV === 'production') {
    fastify.listen(3000, '0.0.0.0')
    console.log('Coolify API is up and running in production.')
  } else {
    const logFile = fs.createWriteStream('api/development/console.log', { flags: 'w' })
    const logStdout = process.stdout

    console.log = function (d) {
      logFile.write(`[INFO]: ${util.format(d)}\n`)
      logStdout.write(util.format(d) + '\n')
    }

    console.error = function (d) {
      logFile.write(`[ERROR]: ${util.format(d)}\n`)
      logStdout.write(util.format(d) + '\n')
    }

    console.warn = function (d) {
      logFile.write(`[WARN]: ${util.format(d)}\n`)
      logStdout.write(util.format(d) + '\n')
    }

    fastify.listen(3001)
    console.log('Coolify API is up and running in development.')
  }
  // On start cleanup inprogress/queued deployments.
  const deployments = await Deployment.find({ progress: { $in: ['queued', 'inprogress'] } })
  for (const deployment of deployments) {
    await Deployment.findByIdAndUpdate(deployment._id, { $set: { progress: 'failed' } })
  }
})
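The startup cleanup above marks stale deployments one document at a time. Inside the same `open` handler, the same effect could be achieved with a single query; this is an optional simplification, not what the original code does:

```js
// Equivalent one-query variant of the startup cleanup (alternative sketch, not the original code).
await Deployment.updateMany(
  { progress: { $in: ['queued', 'inprogress'] } },
  { $set: { progress: 'failed' } }
)
```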
11
data/docker/daemon.json
Normal file
@@ -0,0 +1,11 @@
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "100m",
    "max-file": "5"
  },
  "features": {
    "buildkit": true
  },
  "live-restore": true
}
6
data/fluentd/Dockerfile-dev
Normal file
@@ -0,0 +1,6 @@
FROM fluent/fluent-bit:1.9.0
COPY fluentbit-dev.conf /tmp/fluentbit.conf
ENTRYPOINT ["/fluent-bit/bin/fluent-bit", "-c", "/tmp/fluentbit.conf"]
# USER root
# RUN ["gem", "install", "fluent-plugin-mongo"]
# USER fluent
24
data/fluentd/fluentbit-dev.conf
Normal file
@@ -0,0 +1,24 @@
[INPUT]
    Name forward
    Listen 0.0.0.0
    Port 24224
    Buffer_Chunk_Size 32KB
    Buffer_Max_Size 64KB

[OUTPUT]
    Name influxdb
    Match *
    Host coolify-influxdb
    Port 8086
    Bucket containerlogs
    Org organization
    HTTP_Token supertoken
    Sequence_Tag _seq
    Tag_Keys container_name
[OUTPUT]
    Name http
    Match *
    Host host.docker.internal
    Port 3000
    URI /logs.json
    Format json
28
data/fluentd/fluentd-dev.conf
Normal file
@@ -0,0 +1,28 @@
<source>
  @type forward
  port 24224
  bind 0.0.0.0
</source>

<match **>
  @type http
  endpoint http://host.docker.internal:3000/logs.json
  <buffer>
    flush_at_shutdown true
    flush_mode immediate
    flush_thread_count 8
    flush_thread_interval 1
    flush_thread_burst_interval 1
    retry_forever true
    retry_type exponential_backoff
  </buffer>
</match>

<filter docker.**>
  @type parser
  key_name log
  reserve_data true
  <parse>
    @type json
  </parse>
</filter>
6
data/haproxy-http.Dockerfile
Normal file
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg-http.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
6
data/haproxy-tcp.Dockerfile
Normal file
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg-tcp.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
6
data/haproxy.Dockerfile
Normal file
@@ -0,0 +1,6 @@
FROM haproxytech/haproxy-alpine:2.5
RUN mkdir -p /usr/local/etc/haproxy/ssl /usr/local/etc/haproxy/maps /usr/local/etc/haproxy/spoe

COPY data/haproxy/haproxy.cfg.template /usr/local/etc/haproxy/haproxy.cfg
COPY data/haproxy/dataplaneapi.hcl /usr/local/etc/haproxy/dataplaneapi.hcl
COPY data/haproxy/ssl/default.pem /usr/local/etc/haproxy/ssl/default.pem
29
data/haproxy/dataplaneapi.hcl
Normal file
@@ -0,0 +1,29 @@
config_version = 2
name = "easy_gar"
mode = "single"
status = "null"

dataplaneapi {
  host = "0.0.0.0"
  port = 5555

  transaction {
    transaction_dir = "/tmp/haproxy"
  }

  advertised {
    api_address = ""
    api_port = 0
  }
}

haproxy {
  config_file = "/usr/local/etc/haproxy/haproxy.cfg"
  haproxy_bin = "/usr/local/sbin/haproxy"

  reload {
    reload_delay = 2
    reload_cmd = "kill -HUP 1"
    restart_cmd = "kill -SIGUSR2 1"
  }
}
19
data/haproxy/haproxy.cfg-http.template
Normal file
@@ -0,0 +1,19 @@
global
    log stdout format raw local0 debug

defaults
    mode http
    log global
    timeout http-request 120s
    timeout connect 20s
    timeout client 120s
    timeout server 120s

frontend "${APP}"
    mode http
    bind *:"${PORT}" name "${APP}"
    default_backend "${APP}"

backend "${APP}"
    mode http
    server "${APP}" "${APP}":"${PRIVATE_PORT}" check
15
data/haproxy/haproxy.cfg-tcp.template
Normal file
@@ -0,0 +1,15 @@
global
    log stdout format raw local0 debug

defaults
    mode tcp
    log global

frontend "${APP}"
    mode tcp
    bind *:"${PORT}" name "${APP}"
    default_backend "${APP}"

backend "${APP}"
    mode tcp
    server "${APP}" "${APP}":"${PRIVATE_PORT}" check
38
data/haproxy/haproxy.cfg.template
Normal file
@@ -0,0 +1,38 @@
global
    stats socket /var/run/api.sock user haproxy group haproxy mode 660 level admin expose-fd listeners
    log stdout format raw local0 debug

defaults
    mode http
    log global
    timeout http-request 120s
    timeout connect 20s
    timeout client 120s
    timeout server 120s

userlist haproxy-dataplaneapi
    user admin insecure-password "${HAPROXY_PASSWORD}"

frontend http
    mode http
    bind :80
    bind :443 ssl crt /usr/local/etc/haproxy/ssl/ alpn h2,http/1.1
    acl is_certbot path_beg /.well-known/acme-challenge/
    use_backend backend-certbot if is_certbot
    use_backend %[req.hdr(host),lower]

frontend stats
    bind *:8404
    stats enable
    stats uri /
    stats refresh 5s
    stats admin if TRUE
    stats auth "${HAPROXY_USERNAME}:${HAPROXY_PASSWORD}"

backend backend-certbot
    mode http
    server certbot host.docker.internal:9080

program api
    command /usr/bin/dataplaneapi -f /usr/local/etc/haproxy/dataplaneapi.hcl --userlist haproxy-dataplaneapi
    no option start-on-reload
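The `program api` section launches the HAProxy Data Plane API on port 5555 behind the `haproxy-dataplaneapi` userlist, so the proxy configuration can be read and changed over HTTP at runtime. How Coolify actually drives it is not part of this diff; a hedged read-only sketch against the endpoint configured above could look like this (the `got` client is one of the dependencies introduced later in this diff):

```js
// Hypothetical read-only call against the Data Plane API configured above.
// Credentials come from the haproxy-dataplaneapi userlist; host and port match dataplaneapi.hcl.
const got = require('got')

async function listBackends () {
  return got('http://localhost:5555/v2/services/haproxy/configuration/backends', {
    username: 'admin',
    password: process.env.HAPROXY_PASSWORD
  }).json()
}
```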
81
data/haproxy/ssl/default.pem
Normal file
@@ -0,0 +1,81 @@
-----BEGIN CERTIFICATE-----
|
||||
MIIFETCCAvkCFE/5JtU5geT5hOjFuQPiLgCYHwsOMA0GCSqGSIb3DQEBCwUAMEUx
|
||||
CzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRl
|
||||
cm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMjExMDExMDkwNzQ1WhcNMzExMDA5MDkw
|
||||
NzQ1WjBFMQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UE
|
||||
CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOC
|
||||
Ag8AMIICCgKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
|
||||
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
|
||||
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
|
||||
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
|
||||
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
|
||||
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
|
||||
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
|
||||
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
|
||||
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
|
||||
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
|
||||
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
|
||||
ATANBgkqhkiG9w0BAQsFAAOCAgEAGQED96wBGzbMUlk9mIvZeLerzEAB3YfgfAYa
|
||||
EAi79QHxM8UX06xmA2xtGvJSvlU8Xods9vxpBmIUnbDRTIAHNDApT19+vPg/iSfQ
|
||||
1J9Fo4b5kjmWL6SalEdYcxqH9V/QndHta4MXP91u/ZsJ/exwDTZFatXsfGkPjUmN
|
||||
Xp+Ip6iQg7+kV3JpRnMSbevj2Oujs7qTAdQedH38ZTNS0AaM5gvZyQkccCTKNBQ4
|
||||
3O8MhCau7U0EUirndqsQXa0D3o78FpKztLNXSM7919jU2y36kMrWXfArfrBKHJ9b
|
||||
nZeO7nkbHgvmVS8NTg9pR7L7u+YXTa2p1H2ZnpMQvruV7iL/Pb1H2N68UdvnQScL
|
||||
sgacGSzM6b6PVdWRbECiuzC0UyWLZo/LoU3DQFGoiDQ4e/B3+TMrvgFI0CnpAQ4w
|
||||
qiaVFJlRQeF4GaS4qHsN28OBliFATB3TXONFnz1aVkQlEHuh2+JbuL1b1lxvlX5t
|
||||
gBbu/GgAcP4Uy2z4PoDmempAvNi2kCcLB98m+jbFSMSB3nkrdj6MzyN7kW9bhk3T
|
||||
ClimxDmc23seprwLcxJUPP5q+HRB1VLKXLwIYxu+Up3g29d4k1Iy9nUUP9lITLTk
|
||||
blJxZ2BPuQqTLzyqmAEWa1HxljFC1b7oMp9a98PbxC3MxUggM7zx/rgXWxM8osib
|
||||
uwSZmw0=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEArEZDhvc3ew2Gb8pvJlUrh5x+L5iqNxDYU3cOcOgeELCmOyAS
|
||||
cH+/1xrsHQI05xWPpz6VAja2NKl4OP90getPPkiQV1xAg5/gsOsRL8Pi/MwvQKfZ
|
||||
ObyW3t+sfFb1K3sVnm8bgk5F9OIVyAtzAx+Y53muEJsHOHpaEidnwbY2VE0zQB/G
|
||||
DBQovrMefAwmH4RPqFor6NzFMKVRi33pQjYmcfCVFZylrDeCn8T7llV0lrnWqv6z
|
||||
sGKfL3E4nHvyh/RsGNOXy+XQMxB9SA3j6hFTNtgQIPO/lxptz/+BLZoUt48nHZtr
|
||||
sc5j+3sn8c1O9e6MjI/1q8lvZsk7ZsWCGSwCOvJ9LnxCWOEQUUfqIvGLsk7NJQgf
|
||||
IkodZH9sW5Sjlro21+WBf3nvqlZ8g7r6K1RJOA8AtUiCaN/+o65t86WkwCSwQXcm
|
||||
+nArcwddOx2HN9sFrjJ59N1eYEDGmyK3BdppYuVXay705PmxotR1hCBvnXOb34dn
|
||||
gZxsxFTohr97JvEdNtGSNz4USyZPjgIMF/Gu8ruh0gQ1byhmayRqMGEqMAh58Lvb
|
||||
3HYsd3Bf+LB9PpaXLAdKzsTZ8a28zyDYo8a70h7iBRxhmFwa+Df+pSmUEdzhejfx
|
||||
7jEslhBQSQDmllaHrHc1G6H/w/u+04vi1joaLeLEGQclinKLeU88s9j3zzUCAwEA
|
||||
AQKCAgEAm1/z33Jwk4crTQAjJ0uBqxm1pW/ndSq4MO8cEzEGjL8F7iWK+/P8LiGV
|
||||
+sPWuuRzX7/N3OVDiFOgnqeniNWV7vK7XE9T0GMN4ALiyVW/D4mIxKOeA7jXycOq
|
||||
aap0DPdCFFbZVLkL10Vhp77LyHFjEsJn/4oTBRk0y1LG/as9bOMD6j29/X7hEL20
|
||||
LOU4LQzEW26YU7lqD+nKlijFjHYSTolRrOBPe/fE1BxxXLFOKfMKbcaygc8xCzTu
|
||||
fhQ8Nep45BtSuQ9Yq/WfSLFecemWR8yvH0k37yxjBknHVD23maZ+/PEEPKWM/2+g
|
||||
IzGsmZrBILVmOb2/v9CWxqY0JEfQ6aU/nLW1ZiXSOIPmKEooK/hPVxFIyQ1yET4G
|
||||
kQZ5RroY/QDrI14ms8P0iDzZ8K3EFKUyjiBbc83Mb0YIZ4hKd76gioOUIPeEQB+y
|
||||
QLZ8Cb9YS3V8uIOJg+F4xlpJSePAZphfSxRLojSiKUeCs8gNUxGz0zwiMNf1p76F
|
||||
8CaLgvSwT/cgQjWitMeeE1Ha+8lY8VzESmd10gPk2uES/qdrmMhwFwovPqqrtMqj
|
||||
kMrFKNy7Crka6me3dhKEtryRTk5ho2IS/VCy/eXQ7lUW8Cl4uFxmjpHYSJMqDWvC
|
||||
vu1p3/B1psSZIy2V2M9QqwZCysHqvGJMOCvYmnc6T62+kDRKQ7ECggEBAOS/1ptA
|
||||
75OBAsHLovkspiCvn3gb/VTvH6LOvxYTohjr5iBeX137vg0aR1rg0jwcdf8EEJYw
|
||||
4YxOid7KmV7O25ujzduQgwpVgujnJAeBLeLDC5dVbq3PQah61AvR2O/7t+Ls3oxi
|
||||
cWh/OHC6SeZ/n406cxSCCUpVwtgHTaNFzaSmpDdEOSbjvXjQQjiRsG7j/1u64riq
|
||||
RlJ/hIUlcys0g94yeN/5lPaNfsq0+vTSAYuTVVXVbEntwWcZVZxnQJviZVgJ99zM
|
||||
RzE3sprvvr+I5QQ4FRMn0W9U7gblSJd5FGEL8gye4SRd+LxoUL4DR6pfuwd0vlXA
|
||||
g+dgiOKoHm2Bb8cCggEBAMDMHMNR6uipdMivPjBTlklnaYd9SY3c5x65yNtx4CNh
|
||||
rXyvy/6YvME7PPnKQZ8TQ4DkbVDUCAF7wnyAJJ7eWMav3bNlqWWjzaBvQz4Fn0XG
|
||||
/1W5R1CoJ9DW5FY3f9efJzQTmfn6dIlCx1gW7XfVBZQqI1LORMWUYenk0KAvjlg2
|
||||
UHYYl/BT1RhtYyzOJHto1PaUvCNDiOiDWAkTpLigYm7hGgVmcSwbo4F54SNUHdV7
|
||||
yz3CorCM4VsEYYSL80WHYxf/Zc+mcIDoWOdog0iEeK/Zu++yG5lPRxC1862GmsZA
|
||||
J06BMqX+NVGOfiGcVaGH+SZJXeFcrr7F8ZWp6y38QSMCggEAJwzo4hAv1gqMIfFV
|
||||
nRwWMDZLDwIYOUupJu4MiQRJA+AhpRz3QuAbDbmSvNzshv6E1kgnXLxzhLRTrQkB
|
||||
LcI6k1NfbUA6XqVCd+gdqnpPDwslC2y2PE3Jc62kTXBBjJZ4SfEN/QFBQwmU5Qmo
|
||||
XAUlg8KaqsGYPGxvmtmEU38zIAyitByddRoj2mATLf0RFZ0ulsZMtiG7Z5IFWYWP
|
||||
J60LZf9Py0ycNYrqPkivHuRLBzzbsI+CsQw5nBQjHVQzH2mCy4jIG5V0Ad70Sqbq
|
||||
9V+1WQcJ8f82Lb9d8ydpQRKWfArCA42L+d1g/SkBv65nqZo2H4u6goEfA3zjYW45
|
||||
44/ZOQKCAQB440MhwYqe6ioc76z51l+ElUAZQZjOR/XvUSS9XHDjHosOhJhPgmvQ
|
||||
aZl5MrXkzcpk1lYo+Vovu+8d66eKqfZWVs2XgCYwYf48G6e5CwNsWDOgB7XMwDN/
|
||||
Ak9YNCKIC/Yj9Cp3EPDjZCjkdjPeEIcX+Tf+4vFCRiEC7INX/ZmufBgFhLQ4cAhM
|
||||
8cHexT8g1oG6P1acces1h626u0NstLwjtCeBvVM3CfmC5O4jHco7Iw00I4epVhyz
|
||||
2lJfLvWR4itjT7QB+OXQHmAocWLoJJAcC1WJHU+q2IfB1aT+aElCB9XdpqsgY/4A
|
||||
rm0uG/2hdEXoGNaxyVCUtD8fzdR2GBarAoIBACCYXXREMYb6i1TbR5Q2LvVQUPsO
|
||||
Hgnbr+PLmx93rfUzDcr5r+cJgryjYQDKJTRleDJhg80M3RYOq+IOdl6yxOmRATmJ
|
||||
ZDgwRVD1F6VFxBJePcAW30FI5CoBogsHaZQDKGsopEaDRLK5E3QHUVG5qj323RdI
|
||||
Unf1++wI4nw+qwsVf1gSTcAdzq29v3NIWUyvvrmTNO4MxFTt0/lqkCsdT/2EFQDB
|
||||
/yQ1HCtQQjXE1xlYh0BnMZp9+4FmrlMC9Oj5H0dDSWmInPION0ft8/SjBj4TQ5Qi
|
||||
2DUo1WOWQnVR8Bxz0B8McXS+dOmgLe8ws4/ez7DoEVqHTgirKqBg5qRFQKw=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
1
data/prisma/build-prisma-engine.sh
Normal file
@@ -0,0 +1 @@
docker build --platform linux/amd64,linux/arm64 -t coollabsio/prisma-engine -f prisma-engine.Dockerfile --push .
10
data/prisma/prisma-engine.Dockerfile
Normal file
@@ -0,0 +1,10 @@
FROM rust:1.58.1-alpine3.14 as prisma
WORKDIR /prisma
ENV RUSTFLAGS="-C target-feature=-crt-static"
RUN apk --no-cache add openssl direnv git musl-dev openssl-dev build-base perl protoc
RUN git clone --depth=1 --branch=3.12.x https://github.com/prisma/prisma-engines.git /prisma
RUN cargo build --release

FROM alpine
WORKDIR /prisma-engines
COPY --from=prisma /prisma/target/release/query-engine /prisma/target/release/migration-engine /prisma/target/release/introspection-engine /prisma/target/release/prisma-fmt /prisma-engines/
23
data/traefik/docker-compose-tcp.yaml
Normal file
@@ -0,0 +1,23 @@
version: '3.5'

services:
  ${ID}:
    container_name: proxy-for-${PORT}
    image: traefik:v2.6
    command:
      - --api.insecure=true
      - --entrypoints.web.address=:${PORT}
      - --providers.docker=false
      - --providers.docker.exposedbydefault=false
      - --providers.http.endpoint=http://host.docker.internal:3000/traefik.json?id=${ID}
      - --providers.http.pollTimeout=5s
      - --log.level=error
    ports:
      - '${PORT}:${PORT}'
    networks:
      - ${NETWORK}

networks:
  net:
    external: false
    name: ${NETWORK}
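This compose file points Traefik's HTTP provider at `http://host.docker.internal:3000/traefik.json?id=${ID}`, meaning Coolify itself is expected to serve the dynamic routing configuration. The real route and its payload live elsewhere in the codebase and are not part of this diff; a minimal illustrative sketch of such an endpoint could look like this (route shape and service address are assumptions):

```js
// Hypothetical minimal /traefik.json endpoint for the HTTP provider configured above.
// The actual Coolify implementation builds this payload from its database.
module.exports = async function (fastify) {
  fastify.get('/traefik.json', async (request) => {
    const { id } = request.query
    return {
      tcp: {
        routers: { [id]: { entryPoints: ['web'], rule: 'HostSNI(`*`)', service: id } },
        services: { [id]: { loadBalancer: { servers: [{ address: `${id}:5432` }] } } }
      }
    }
  })
}
```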
0
db/.gitkeep
Normal file
35
docker-compose-dev.yaml
Normal file
@@ -0,0 +1,35 @@
version: '3.8'

services:
  redis:
    image: redis:6.2-alpine
    container_name: coolify-redis
    networks:
      - coolify-infra
    ports:
      - target: 6379
        published: 6379
        protocol: tcp
        mode: host
  # fluentbit:
  #   container_name: coolify-fluentbit
  #   build:
  #     context: ./data/fluentd
  #     dockerfile: Dockerfile-dev
  #   ports:
  #     - target: 24224
  #       published: 24224
  #       protocol: tcp
  #       mode: host
  #     - target: 24224
  #       published: 24224
  #       protocol: udp
  #       mode: host
  #   networks:
  #     - coolify-infra
  #   extra_hosts:
  #     - 'host.docker.internal:host-gateway'
networks:
  coolify-infra:
    attachable: true
    name: coolify-infra
23
docker-compose-haproxy.yaml
Normal file
@@ -0,0 +1,23 @@
version: '3.8'

services:
  haproxy:
    image: coollabsio/coolify-haproxy-alpine:latest
    container_name: coolify-haproxy
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    networks:
      - coolify
    volumes:
      - './data/haproxy/:/usr/local/etc/haproxy/'
    ports:
      - '80:80'
      - '443:443'
      - '8404:8404'
      - '5555:5555'
      - '3306:3306'

networks:
  coolify:
    attachable: true
    name: coolify
29
docker-compose-traefik.yaml
Normal file
@@ -0,0 +1,29 @@
version: '3.8'

services:
  proxy:
    image: traefik:v2.6
    command:
      - --api.insecure=true
      - --entrypoints.web.address=:80
      - --entrypoints.websecure.address=:443
      - --providers.docker=false
      - --providers.docker.exposedbydefault=false
      - --providers.http.endpoint=http://host.docker.internal:3000/traefik.json
      - --providers.http.pollTimeout=5s
      - --log.level=error
    ports:
      - '80:80'
      - '443:443'
      - '8080:8080'
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    networks:
      - coolify-infra

networks:
  coolify-infra:
    attachable: true
    name: coolify-infra
43
docker-compose.yaml
Normal file
@@ -0,0 +1,43 @@
version: '3.8'

services:
  coolify:
    image: coollabsio/coolify:${TAG:-latest}
    restart: always
    container_name: coolify
    ports:
      - target: 3000
        published: 3000
        protocol: tcp
        mode: host
    volumes:
      - 'coolify-db:/app/db'
      - 'coolify-ssl-certs:/app/ssl'
      - 'coolify-letsencrypt:/etc/letsencrypt'
      - '/var/run/docker.sock:/var/run/docker.sock'
    env_file:
      - '.env'
    networks:
      - coolify-infra
    depends_on: ['redis']
  redis:
    image: redis:6.2-alpine
    restart: always
    container_name: coolify-redis
    networks:
      - coolify-infra

networks:
  coolify-infra:
    attachable: true
    name: coolify-infra

volumes:
  coolify-db:
    name: coolify-db
  coolify-ssl-certs:
    name: coolify-ssl-certs
  coolify-letsencrypt:
    name: coolify-letsencrypt
  coolify-traefik-letsencrypt:
    name: coolify-traefik-letsencrypt
19
index.html
@@ -1,19 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8" />
  <link rel="icon" href="/favicon.png" />
  <link rel="preload" as="image" href="/favicon.png">
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>coolify: Heroku & Netlify alternative</title>
  <link rel="dns-prefetch" href="https://cdn.coollabs.io/" />
  <link rel="preconnect" href="https://cdn.coollabs.io/" crossorigin="" />
  <link rel="stylesheet" href="https://cdn.coollabs.io/fonts/montserrat/montserrat.css" />
</head>

<body>
  <script type="module" src="/src/index.js"></script>
</body>

</html>
43
install.sh
@@ -1,43 +0,0 @@
#!/bin/bash
GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" git pull
echo "#### Building base image."
docker build -t coolify-base -f install/Dockerfile-base .
if [ $? -ne 0 ]; then
  echo '#### Ooops something not okay!'
  exit 1
fi

echo "#### Checking configuration."
docker run --rm -w /usr/src/app coolify-base node install/install.js --check
if [ $? -ne 0 ]; then
  echo '#### Missing configuration.'
  exit 1
fi

case "$1" in
"all")
  echo "#### Rebuild everything."
  docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type all
  ;;
"coolify")
  echo "#### Rebuild coolify."
  docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type coolify
  ;;
"proxy")
  echo "#### Rebuild proxy."
  docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type proxy
  ;;
"upgrade-phase-1")
  echo "#### Rebuild coolify from frontend request phase 1."
  docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/install.js --type upgrade
  ;;
"upgrade-phase-2")
  echo "#### Rebuild coolify from frontend request phase 2."
  docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v /data/coolify:/data/coolify -u root -w /usr/src/app coolify-base node install/update.js --type upgrade
  ;;

*)
  echo "Use 'all' to build & deploy proxy+coolify, 'coolify' to build & deploy only coolify, 'proxy' to build & deploy only proxy."
  exit 1
  ;;
esac
@@ -1,5 +0,0 @@
FROM coolify-base
WORKDIR /usr/src/app
RUN yarn build
CMD ["yarn", "start"]
EXPOSE 3000
@@ -1,18 +0,0 @@
FROM ubuntu:20.04 as binaries
RUN apt update && apt install -y curl gnupg2 ca-certificates
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN echo 'deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable' >> /etc/apt/sources.list
RUN curl -L https://github.com/a8m/envsubst/releases/download/v1.2.0/envsubst-`uname -s`-`uname -m` -o /usr/bin/envsubst
RUN chmod +x /usr/bin/envsubst
RUN apt update && apt install -y docker-ce-cli && apt clean all

FROM node:14 as modules
COPY --from=binaries /usr/bin/docker /usr/bin/docker
COPY --from=binaries /usr/bin/envsubst /usr/bin/envsubst
WORKDIR /usr/src/app
COPY ./package*.json .
RUN yarn install

FROM modules
WORKDIR /usr/src/app
COPY . .
@@ -1,97 +0,0 @@
version: '3.8'

services:
  proxy:
    image: traefik:v2.3
    hostname: coollabs-proxy
    ports:
      - target: 80
        published: 80
        protocol: tcp
        mode: host
      - target: 443
        published: 443
        protocol: tcp
        mode: host
    command:
      - --api.insecure=false
      - --api.dashboard=false
      - --api.debug=false
      - --log.level=ERROR
      - --providers.docker=true
      - --providers.docker.swarmMode=true
      - --providers.docker.exposedbydefault=false
      - --providers.docker.network=${DOCKER_NETWORK}
      - --entrypoints.web.address=:80
      - --entrypoints.websecure.address=:443
      - --certificatesresolvers.letsencrypt.acme.httpchallenge=true
      - --certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web
      - --certificatesresolvers.letsencrypt.acme.email=${EMAIL}
      - --certificatesresolvers.letsencrypt.acme.storage=/data/coolify/acme.json
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - /data/coolify:/data/coolify
    networks:
      - ${DOCKER_NETWORK}
    deploy:
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      replicas: 1
      placement:
        constraints:
          - node.role == manager
      labels:
        - "traefik.enable=true"
        - "traefik.http.routers.api.entrypoints=websecure"
        - "traefik.http.routers.api.service=api@internal"
        - "traefik.http.routers.api.middlewares=auth"
        - "traefik.http.services.traefik.loadbalancer.server.port=80"
        - "traefik.http.services.traefik.loadbalancer.server.port=443"

        # Global redirect www to non-www
        - "traefik.http.routers.www-catchall.rule=hostregexp(`{host:www.(.+)}`)"
        - "traefik.http.routers.www-catchall.entrypoints=web"
        - "traefik.http.routers.www-catchall.middlewares=redirect-www-to-nonwww"
        - "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.regex=^http://(?:www\\.)?(.+)"
        - "traefik.http.middlewares.redirect-www-to-nonwww.redirectregex.replacement=http://$$$${1}"

        # Global redirect http to https
        - "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
        - "traefik.http.routers.http-catchall.entrypoints=web"
        - "traefik.http.routers.http-catchall.middlewares=redirect-to-https"

        - "traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https"
        - "traefik.http.middlewares.global-compress.compress=true"

  coolify:
    image: coolify
    hostname: coollabs-coolify
    env_file:
      - .env
    networks:
      - ${DOCKER_NETWORK}
    command: "yarn start"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    deploy:
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      replicas: 1
      labels:
        - "traefik.enable=true"
        - "traefik.http.routers.coolify.entrypoints=websecure"
        - "traefik.http.routers.coolify.tls.certresolver=letsencrypt"
        - "traefik.http.routers.coolify.rule=Host(`${DOMAIN}`) && PathPrefix(`/`)"
        - "traefik.http.services.coolify.loadbalancer.server.port=3000"
        - "traefik.http.routers.coolify.middlewares=global-compress"

networks:
  ${DOCKER_NETWORK}:
    driver: overlay
    name: ${DOCKER_NETWORK}
    external: true

@@ -1,52 +0,0 @@
require('dotenv').config()
const { program } = require('commander')
const fastify = require('fastify')()
const { schema } = require('../api/schema')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')

program.version('0.0.1')
program
  .option('-d, --debug', 'Debug outputs.')
  .option('-c, --check', 'Only checks configuration.')
  .option('-t, --type <type>', 'Deploy type.')

program.parse(process.argv)

if (program.check) {
  checkConfig().then(() => {
    console.log('Config: OK')
  }).catch((err) => {
    console.log('Config: NOT OK')
    console.error(err)
    process.exit(1)
  })
} else {
  if (user !== 'root') {
    console.error(`Please run as root! Current user: ${user}`)
    process.exit(1)
  }
  shell.exec(`docker network create ${process.env.DOCKER_NETWORK} --driver overlay`, { silent: !program.debug })
  shell.exec('docker build -t coolify -f install/Dockerfile .')
  if (program.type === 'all') {
    shell.exec('docker stack rm coollabs-coolify', { silent: !program.debug })
  } else if (program.type === 'coolify') {
    shell.exec('docker service rm coollabs-coolify_coolify')
  } else if (program.type === 'proxy') {
    shell.exec('docker service rm coollabs-coolify_proxy')
  }
  if (program.type !== 'upgrade') shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !program.debug, shell: '/bin/bash' })
}

function checkConfig () {
  return new Promise((resolve, reject) => {
    fastify.register(require('fastify-env'), {
      schema,
      dotenv: true
    })
      .ready((err) => {
        if (err) reject(err)
        resolve()
      })
  })
}
@@ -1,21 +0,0 @@
require('dotenv').config()
const { program } = require('commander')
const shell = require('shelljs')
const user = shell.exec('whoami', { silent: true }).stdout.replace('\n', '')

program.version('0.0.1')
program
  .option('-d, --debug', 'Debug outputs.')
  .option('-c, --check', 'Only checks configuration.')
  .option('-t, --type <type>', 'Deploy type.')

program.parse(process.argv)

if (user !== 'root') {
  console.error(`Please run as root! Current user: ${user}`)
  process.exit(1)
}
if (program.type === 'upgrade') {
  shell.exec('docker service rm coollabs-coolify_coolify')
  shell.exec('set -a && source .env && set +a && envsubst < install/coolify-template.yml | docker stack deploy -c - coollabs-coolify', { silent: !program.debug, shell: '/bin/bash' })
}
155
package.json
@@ -1,63 +1,96 @@
|
||||
{
|
||||
"name": "coolify",
|
||||
"description": "An open-source, hassle-free, self-hostable Heroku & Netlify alternative.",
|
||||
"version": "1.0.1",
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"lint": "standard",
|
||||
"start": "NODE_ENV=production node api/server",
|
||||
"dev": "run-p dev:db dev:routify dev:svite dev:server",
|
||||
"dev:db": "NODE_ENV=development node api/development/mongodb.js",
|
||||
"dev:server": "nodemon -w api api/server",
|
||||
"dev:routify": "routify run",
|
||||
"dev:svite": "svite",
|
||||
"build": "run-s build:routify build:svite",
|
||||
"build:routify": "routify run -b",
|
||||
"build:svite": "svite build"
|
||||
},
|
||||
"dependencies": {
|
||||
"@roxi/routify": "^2.7.3",
|
||||
"@zerodevx/svelte-toast": "^0.1.4",
|
||||
"axios": "^0.21.0",
|
||||
"commander": "^6.2.1",
|
||||
"cuid": "^2.1.8",
|
||||
"dayjs": "^1.10.4",
|
||||
"deepmerge": "^4.2.2",
|
||||
"dockerode": "^3.2.1",
|
||||
"dotenv": "^8.2.0",
|
||||
"fastify": "^3.9.1",
|
||||
"fastify-env": "^2.1.0",
|
||||
"fastify-jwt": "^2.1.3",
|
||||
"fastify-plugin": "^3.0.0",
|
||||
"fastify-static": "^3.3.0",
|
||||
"generate-password": "^1.6.0",
|
||||
"js-yaml": "^4.0.0",
|
||||
"jsonwebtoken": "^8.5.1",
|
||||
"mongoose": "^5.11.4",
|
||||
"shelljs": "^0.8.4",
|
||||
"unique-names-generator": "^4.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mongodb-memory-server-core": "^6.9.3",
|
||||
"nodemon": "^2.0.6",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"postcss": "^7.0.35",
|
||||
"postcss-import": "^12.0.1",
|
||||
"postcss-load-config": "^3.0.0",
|
||||
"postcss-preset-env": "^6.7.0",
|
||||
"prettier": "1.19",
|
||||
"prettier-plugin-svelte": "^2.1.6",
|
||||
"standard": "^16.0.3",
|
||||
"svelte": "^3.29.7",
|
||||
"svelte-hmr": "^0.12.2",
|
||||
"svelte-preprocess": "^4.6.1",
|
||||
"svite": "0.8.1",
|
||||
"tailwindcss": "compat"
|
||||
},
|
||||
"keywords": [
|
||||
"svelte",
|
||||
"routify",
|
||||
"fastify",
|
||||
"tailwind"
|
||||
]
|
||||
"name": "coolify",
|
||||
"description": "An open-source & self-hostable Heroku / Netlify alternative.",
|
||||
"version": "2.9.4",
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"dev": "docker-compose -f docker-compose-dev.yaml up -d && cross-env NODE_ENV=development & svelte-kit dev --host 0.0.0.0",
|
||||
"dev:stop": "docker-compose -f docker-compose-dev.yaml down",
|
||||
"dev:logs": "docker-compose -f docker-compose-dev.yaml logs -f --tail 10",
|
||||
"studio": "npx prisma studio",
|
||||
"start": "npx prisma migrate deploy && npx prisma generate && npx prisma db seed && node build/index.js",
|
||||
"build": "svelte-kit build",
|
||||
"preview": "svelte-kit preview",
|
||||
"check": "svelte-check --tsconfig ./tsconfig.json",
|
||||
"check:watch": "svelte-check --tsconfig ./tsconfig.json --watch",
|
||||
"db:generate": "prisma generate",
|
||||
"db:push": "prisma db push && prisma generate",
|
||||
"db:seed": "prisma db seed",
|
||||
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
|
||||
"release:production:all": "cross-var docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
|
||||
"release:production:amd": "cross-var docker build --platform linux/amd64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
|
||||
"release:production:arm": "cross-var docker build --platform linux/arm64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
|
||||
"release:staging:all": "cross-var docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify:$npm_package_version --push .",
|
||||
"release:staging:amd": "cross-var docker build --platform linux/amd64 -t coollabsio/coolify:$npm_package_version --push .",
|
||||
"release:staging:arm": "cross-var docker build --platform linux/arm64 -t coollabsio/coolify:$npm_package_version --push .",
|
||||
"release:haproxy": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-alpine:latest -t coollabsio/coolify-haproxy-alpine:1.1.0 -f data/haproxy.Dockerfile --push .",
|
||||
"release:haproxy:tcp": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-tcp-alpine:latest -t coollabsio/coolify-haproxy-tcp-alpine:1.1.0 -f data/haproxy-tcp.Dockerfile --push .",
|
||||
"release:haproxy:http": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-http-alpine:latest -t coollabsio/coolify-haproxy-http-alpine:1.1.0 -f data/haproxy-http.Dockerfile --push .",
|
||||
"prepare": "husky install"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/adapter-node": "1.0.0-next.73",
|
||||
"@sveltejs/adapter-static": "1.0.0-next.31",
|
||||
"@sveltejs/kit": "1.0.0-next.334",
|
||||
"@types/js-cookie": "3.0.2",
|
||||
"@types/js-yaml": "4.0.5",
|
||||
"@types/node": "17.0.34",
|
||||
"@types/node-forge": "1.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "4.31.1",
|
||||
"@typescript-eslint/parser": "4.31.1",
|
||||
"@zerodevx/svelte-toast": "0.7.1",
|
||||
"autoprefixer": "10.4.7",
|
||||
"cross-env": "7.0.3",
|
||||
"cross-var": "1.1.0",
|
||||
"eslint": "7.32.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"eslint-plugin-svelte3": "3.4.1",
|
||||
"husky": "7.0.4",
|
||||
"lint-staged": "12.4.1",
|
||||
"postcss": "8.4.13",
|
||||
"prettier": "2.6.2",
|
||||
"prettier-plugin-svelte": "2.7.0",
|
||||
"prettier-plugin-tailwindcss": "0.1.11",
|
||||
"prisma": "3.11.1",
|
||||
"svelte": "3.48.0",
|
||||
"svelte-check": "2.7.1",
|
||||
"svelte-preprocess": "4.10.6",
|
||||
"svelte-select": "4.4.7",
|
||||
"sveltekit-i18n": "2.2.1",
|
||||
"tailwindcss": "3.0.24",
|
||||
"ts-node": "10.7.0",
|
||||
"tslib": "2.4.0",
|
||||
"typescript": "4.6.4"
|
||||
},
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@iarna/toml": "2.2.5",
|
||||
"@prisma/client": "3.11.1",
|
||||
"@sentry/node": "6.19.7",
|
||||
"bcryptjs": "2.4.3",
|
||||
"bullmq": "1.82.2",
|
||||
"compare-versions": "4.1.3",
|
||||
"cookie": "0.5.0",
|
||||
"cuid": "2.1.8",
|
||||
"dayjs": "1.11.2",
|
||||
"dockerode": "3.3.1",
|
||||
"dotenv-extended": "2.9.0",
|
||||
"generate-password": "1.7.0",
|
||||
"get-port": "6.1.2",
|
||||
"got": "12.0.4",
|
||||
"is-ip": "4.0.0",
|
||||
"js-cookie": "3.0.1",
|
||||
"js-yaml": "4.1.0",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"mustache": "4.2.0",
|
||||
"node-forge": "1.3.1",
|
||||
"node-os-utils": "1.3.6",
|
||||
"p-limit": "4.0.0",
|
||||
"svelte-kit-cookie-session": "2.1.4",
|
||||
"tailwindcss-scrollbar": "0.1.0",
|
||||
"unique-names-generator": "4.7.1"
|
||||
},
|
||||
"prisma": {
|
||||
"seed": "node prisma/seed.cjs"
|
||||
}
|
||||
}
|
||||
|
||||
10123
pnpm-lock.yaml
generated
File diff suppressed because it is too large
6
postcss.config.cjs
Normal file
@@ -0,0 +1,6 @@
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {}
  }
};
@@ -1,7 +0,0 @@
module.exports = {
  plugins: [
    require('postcss-import'),
    require('tailwindcss'),
    require('postcss-preset-env')({ stage: 1 })
  ]
}
443
prisma/migrations/20220131142425_init/migration.sql
Normal file
@@ -0,0 +1,443 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"password" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Permission" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"userId" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
|
||||
CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT,
|
||||
FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "TeamInvitation" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"uid" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"teamName" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Application" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"repository" TEXT,
|
||||
"configHash" TEXT,
|
||||
"branch" TEXT,
|
||||
"buildPack" TEXT,
|
||||
"projectId" INTEGER,
|
||||
"port" INTEGER,
|
||||
"installCommand" TEXT,
|
||||
"buildCommand" TEXT,
|
||||
"startCommand" TEXT,
|
||||
"baseDirectory" TEXT,
|
||||
"publishDirectory" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "BuildLog" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT,
|
||||
"buildId" TEXT NOT NULL,
|
||||
"line" TEXT NOT NULL,
|
||||
"time" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Build" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"type" TEXT NOT NULL,
|
||||
"applicationId" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"commit" TEXT,
|
||||
"status" TEXT DEFAULT 'queued',
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT NOT NULL,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GithubApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"appId" INTEGER,
|
||||
"installationId" INTEGER,
|
||||
"clientId" TEXT,
|
||||
"clientSecret" TEXT,
|
||||
"webhookSecret" TEXT,
|
||||
"privateKey" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitlabApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"oauthId" INTEGER NOT NULL,
|
||||
"groupName" TEXT,
|
||||
"deployKeyId" INTEGER,
|
||||
"privateSshKey" TEXT,
|
||||
"publicSshKey" TEXT,
|
||||
"webhookToken" TEXT,
|
||||
"appId" TEXT,
|
||||
"appSecret" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Database" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"defaultDatabase" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"dbUser" TEXT,
|
||||
"dbUserPassword" TEXT,
|
||||
"rootUser" TEXT,
|
||||
"rootUserPassword" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Minio" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"rootUser" TEXT NOT NULL,
|
||||
"rootUserPassword" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Vscodeserver" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"password" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_TeamToUser" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ApplicationToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitSourceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GithubAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitlabAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DestinationDockerToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DatabaseToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ServiceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
|
||||
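The "_TeamToUser", "_ApplicationToTeam" and similar tables above are Prisma's implicit many-to-many join tables: "A" and "B" hold the ids of the two related rows, the ("A", "B") unique index prevents duplicate links, and the extra index on "B" speeds lookups from the other side. As a hedged illustration (not part of the migration), a query through one of these tables might look like this; the team id is a placeholder:

-- all users belonging to a given team, via the implicit join table
-- 'some-team-id' is an illustrative placeholder
SELECT u.*
FROM "User" u
JOIN "_TeamToUser" tu ON tu."B" = u."id"
WHERE tu."A" = 'some-team-id';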
prisma/migrations/20220210104005_redis_aol/migration.sql (Normal file, 28 lines)
@@ -0,0 +1,28 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Team" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT,
    "serviceId" TEXT
);
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
DROP TABLE "Team";
ALTER TABLE "new_Team" RENAME TO "Team";
CREATE TABLE "new_DatabaseSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "databaseId" TEXT NOT NULL,
    "isPublic" BOOLEAN NOT NULL DEFAULT false,
    "appendOnly" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
DROP TABLE "DatabaseSettings";
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
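The "RedefineTables" blocks in these migrations follow the table-rewrite pattern Prisma uses on SQLite, since SQLite's ALTER TABLE cannot modify existing columns or constraints: create a "new_X" table with the desired shape, copy the rows across, drop the old table, and rename the new one, all with foreign key enforcement temporarily off. A minimal sketch of the same steps for adding a column with a default; the "Example" table and "flag" column are illustrative, not part of the coolify schema:

PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Example" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "flag" BOOLEAN NOT NULL DEFAULT false  -- newly added column; copied rows pick up the default
);
INSERT INTO "new_Example" ("id") SELECT "id" FROM "Example";
DROP TABLE "Example";
ALTER TABLE "new_Example" RENAME TO "Example";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;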
@@ -0,0 +1,11 @@
/*
  Warnings:

  - A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.

*/
-- DropIndex
DROP INDEX "Secret_name_key";

-- CreateIndex
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");
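This migration relaxes the global unique constraint on secret names to a per-application one, so two applications can each define a secret with the same name. A small self-contained sketch of the same constraint shape; "demo_secret" and its rows are illustrative and not the actual Secret schema:

-- illustrative stand-in table with the composite unique index
CREATE TABLE "demo_secret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "applicationId" TEXT NOT NULL
);
CREATE UNIQUE INDEX "demo_secret_name_applicationId_key" ON "demo_secret"("name", "applicationId");
INSERT INTO "demo_secret" VALUES ('s1', 'DATABASE_URL', 'app-1');
INSERT INTO "demo_secret" VALUES ('s2', 'DATABASE_URL', 'app-2'); -- allowed: same name, different application
-- INSERT INTO "demo_secret" VALUES ('s3', 'DATABASE_URL', 'app-1'); -- would fail: duplicate (name, applicationId)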
prisma/migrations/20220217211304_dualcerts/migration.sql (Normal file, 47 lines)
@@ -0,0 +1,47 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
CREATE TABLE "new_Service" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "type" TEXT,
    "version" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
DROP TABLE "Service";
ALTER TABLE "new_Service" RENAME TO "Service";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
prisma/migrations/20220219231255_prmr_secrets/migration.sql (Normal file, 19 lines)
@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Secret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "isPRMRSecret" BOOLEAN NOT NULL DEFAULT false,
    "isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "applicationId" TEXT NOT NULL,
    CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Secret" ("applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value") SELECT "applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value" FROM "Secret";
DROP TABLE "Secret";
ALTER TABLE "new_Secret" RENAME TO "Secret";
CREATE UNIQUE INDEX "Secret_name_applicationId_isPRMRSecret_key" ON "Secret"("name", "applicationId", "isPRMRSecret");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,20 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
prisma/migrations/20220301101928_proxyhash/migration.sql (Normal file, 2 lines)
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "proxyHash" TEXT;
@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "ServiceSecret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "serviceId" TEXT NOT NULL,
    CONSTRAINT "ServiceSecret_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "ServiceSecret_name_serviceId_key" ON "ServiceSecret"("name", "serviceId");
prisma/migrations/20220311213422_autodeploy/migration.sql (Normal file, 19 lines)
@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "autodeploy" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "phpModules" TEXT;
@@ -0,0 +1,18 @@
-- CreateTable
CREATE TABLE "ApplicationPersistentStorage" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "path" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_key" ON "ApplicationPersistentStorage"("applicationId");

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationPersistentStorage_path_key" ON "ApplicationPersistentStorage"("path");

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
Some files were not shown because too many files have changed in this diff.