Mirror of https://github.com/ershisan99/coolify.git (synced 2025-12-18 12:33:06 +00:00)
Compare commits
959 Commits
(959 commits in this comparison, from 31d7e7e806 at the top of the list down to 31b3f58b2c; only the SHA1 column was captured in this mirror, so the per-commit author, date, and message table is omitted.)

16  .devcontainer/Dockerfile  Normal file
@@ -0,0 +1,16 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node/.devcontainer/base.Dockerfile

# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
ARG VARIANT="16-bullseye"
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}

# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
#     && apt-get -y install --no-install-recommends <your-package-list-here>

# [Optional] Uncomment if you want to install an additional version of node using nvm
# ARG EXTRA_NODE_VERSION=10
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"

# [Optional] Uncomment if you want to install more global node modules
RUN su node -c "npm install -g pnpm"

28  .devcontainer/devcontainer.json  Normal file
@@ -0,0 +1,28 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node
{
  "name": "Node.js",
  "build": {
    "dockerfile": "Dockerfile",
    // Update 'VARIANT' to pick a Node version: 18, 16, 14.
    // Append -bullseye or -buster to pin to an OS version.
    // Use -bullseye variants on local arm64/Apple Silicon.
    "args": {
      "VARIANT": "16-bullseye"
    }
  },
  // Set *default* container specific settings.json values on container create.
  "settings": {},
  // Add the IDs of extensions you want installed when the container is created.
  "extensions": ["dbaeumer.vscode-eslint", "svelte.svelte-vscode"],
  // Use 'forwardPorts' to make a list of ports inside the container available locally.
  "forwardPorts": [3000],
  // Use 'postCreateCommand' to run commands after the container is created.
  "postCreateCommand": "cp .env.template .env && pnpm install && pnpm db:push && pnpm db:seed",
  // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
  "remoteUser": "node",
  "features": {
    "docker-in-docker": "20.10",
    "github-cli": "latest"
  }
}
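
If you prefer the command line over the VS Code UI, the same container can be brought up with the Dev Containers CLI. This is an illustrative sketch, not part of the diff: it assumes the `@devcontainers/cli` npm package, which is not used by this repository itself.

```bash
# assumes Docker is running locally
npm install -g @devcontainers/cli

# build and start the container defined by .devcontainer/devcontainer.json
devcontainer up --workspace-folder .

# run the dev server inside it; postCreateCommand has already copied .env,
# installed dependencies, and pushed/seeded the database per the config above
devcontainer exec --workspace-folder . pnpm dev
```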

@@ -1,7 +1,11 @@
.DS_Store
node_modules
/.svelte
/build
/functions
.pnpm-store
.pnpm-debug.log
build
.svelte-kit
package
.env
.env.*
!.env.example
dist
client
apps/api/db/*.db

@@ -1,35 +0,0 @@
####################################
# Domain where your Coolify instance will be available and reachable.
# It's the same as you set in Github OAuth App and Github App as <domain>.
DOMAIN=
## Let's Encrypt contact email required
EMAIL=

# JWT Token Sign Key for logging you in to Coolify's frontend
JWT_SIGN_KEY=
# Encryption key for SECRETS - do NOT share it with others!
SECRETS_ENCRYPTION_KEY=

# Docker Engine
DOCKER_ENGINE=/var/run/docker.sock
# Docker network to use internally between the proxy and your apps
DOCKER_NETWORK=coollabs

# Mongodb
# Values in case if you are using our Mongodb installation - CHANGE user and password fields!
MONGODB_HOST=coollabs-mongodb
MONGODB_PORT=27017
MONGODB_USER=supercooldbuser
MONGODB_PASSWORD=developmentPassword4db
MONGODB_DB=coolLabs-prod

# Frontend only variables
VITE_GITHUB_APP_CLIENTID=
VITE_GITHUB_APP_NAME=

# Github OAuth & App secrets and private key - you can get it from Github.
GITHUB_APP_CLIENT_SECRET=
GITHUP_APP_WEBHOOK_SECRET=

# It should look like this. Newlines breaks with \n
GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA7Y+Uwkd8FINSwFktWGdtwCaOAazTDYR8ucEzGyR9r+ooJZhF\nOc32qgDSps6Q5DsqPOzvfhiviqU+et9VF+bJhfdzwJ+Le86QZH1RgsDMoY049XvI\nKSwP........"

2  .github/FUNDING.yml  vendored
@@ -1 +1 @@
ko_fi: andrasbacsai
open_collective: coollabsio

47  .github/ISSUE_TEMPLATE/--bug-report.yaml  vendored  Normal file
@@ -0,0 +1,47 @@
name: 🐞 Bug report
description: Create a bug report to help us improve coolify
title: "[Bug]: "
labels: [Bug]
assignees:
  - andrasbacsai
  - vasani-arpit
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report! Please fill the form in English
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Description
      description: A concise description of what you're experiencing and what you expect.
      placeholder: |
        When I do <X>, <Y> happens and I see the error message attached below:
        ```...```
        What I expect is <Z>
    validations:
      required: true
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Add steps to reproduce this behaviour, include console / network logs & videos
      placeholder: |
        1. Go to '...'
        2. Click on '....'
        3. Scroll down to '....'
        4. See error
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: "The version of your coolify Instance"
      placeholder: "2.5.2"
    validations:
      required: true

31  .github/ISSUE_TEMPLATE/--feature-request.yaml  vendored  Normal file
@@ -0,0 +1,31 @@
name: 🛠️ Feature request
description: Suggest an idea to improve coolify
title: '[Feature]: '
labels: [Enhancement]
assignees:
  - andrasbacsai
  - vasani-arpit
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to request a feature for coolify! Please also add your request here to get feedback from the community: https://feedback.coolify.io/!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue related to this feature request already exists.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Summary
      description: One paragraph description of the feature.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Why should this be worked on?
      description: A concise description of the problems or use cases for this feature request.
    validations:
      required: true

20  .github/ISSUE_TEMPLATE/--task.yaml  vendored  Normal file
@@ -0,0 +1,20 @@
name: 📝 Task
description: Create a task for the team to work on
title: "[Task]: "
labels: [Task]
body:
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue related to this already exists.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: SubTasks
      placeholder: |
        - Sub Task 1
        - Sub Task 2
    validations:
      required: false

8  .github/ISSUE_TEMPLATE/config.yml  vendored  Normal file
@@ -0,0 +1,8 @@
blank_issues_enabled: true
contact_links:
  - name: 🤔 Questions and Help
    url: https://discord.com/invite/6rDM4fkymF
    about: Reach out to us on discord or our github discussions page.
  - name: 🙋♂️ service request
    url: https://feedback.coolify.io/
    about: want to request a new service? for e.g wordpress, hasura, appwrite etc...

39  .github/workflows/github-actions.yml  vendored  Normal file
@@ -0,0 +1,39 @@
name: release-coolify

on:
  release:
    types: published

jobs:
  make-it-coolifyed:
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v2
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      -
        name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Get current package version
        uses: martinbeentjes/npm-get-version-action@v1.2.3
        id: package-version
      -
        name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
          cache-from: type=registry,ref=coollabsio/coolify:buildcache
          cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
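
The last two workflow steps correspond roughly to the following local Docker Buildx invocation. This is an illustrative sketch, not part of the diff: it assumes Buildx and QEMU binfmt emulation are available, and `<version>` stands in for the value the workflow reads from package.json.

```bash
# one-time: register QEMU emulators and create a buildx builder
docker run --privileged --rm tonistiigi/binfmt --install all
docker buildx create --use

# multi-arch build and push, mirroring the build-push-action inputs above
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --push \
  -t coollabsio/coolify:latest \
  -t coollabsio/coolify:<version> \
  --cache-from type=registry,ref=coollabsio/coolify:buildcache \
  --cache-to type=registry,ref=coollabsio/coolify:buildcache,mode=max \
  .
```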

18  .gitignore  vendored
@@ -1,9 +1,11 @@
/node_modules
/.svelte
/.svelte-kit
/.pnpm-store
/build
/functions
.env
.DS_Store
.pnpm-debug.log
node_modules
build
.svelte-kit
package
.env
.env.*
!.env.example
dist
client
apps/api/db/*.db

9  .gitpod.yml  Normal file
@@ -0,0 +1,9 @@
# This configuration file was automatically generated by Gitpod.
# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file)
# and commit this file to your remote git repository to share the goodness with others.

tasks:
  - init: npm install && npm run build
    command: npm run start

@@ -1,5 +0,0 @@
.svelte/**
static/**
build/**
node_modules/**
.svelte-kit/**

11  .vscode/settings.json  vendored  Normal file
@@ -0,0 +1,11 @@
{
  "i18n-ally.localesPaths": ["src/lib/locales"],
  "i18n-ally.keystyle": "nested",
  "i18n-ally.extract.ignoredByFiles": {
    "src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
  },
  "i18n-ally.sourceLanguage": "en",
  "i18n-ally.enabledFrameworks": ["svelte"],
  "i18n-ally.enabledParsers": ["js", "ts", "json"],
  "i18n-ally.extract.autoDetect": true
}

281  CONTRIBUTING.md  Normal file
@@ -0,0 +1,281 @@
# 👋 Welcome

First of all, thank you for considering contributing to my project! It means a lot 💜.

Contribution guide is for v2, not applicable for v3

## 🙋 Want to help?

If you begin in GitHub contribution, you can find the [first contribution](https://github.com/firstcontributions/first-contributions) and follow this guide.

Follow the [introduction](#introduction) to get started then start contributing!

This is a little list of what you can do to help the project:

- [🧑💻 Develop your own ideas](#developer-contribution)
- [🌐 Translate the project](#translation)

## 👋 Introduction

### Setup with github codespaces

If you have github codespaces enabled then you can just create a codespace and run `pnpm dev` to run your the dev environment. All the required dependencies and packages has been configured for you already.

### Setup locally in your machine

> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. 💡 Although windows users can use github codespaces for development

#### Recommended Pull Request Guideline

- Fork the project
- Clone your fork repo to local
- Create a new branch
- Push to your fork repo
- Create a pull request: https://github.com/coollabsio/compare
- Write a proper description
- Open the pull request to review against `next` branch

---

# How to start after you set up your local fork?

Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!

You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.

#### Steps for local setup

1. Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
2. Install dependencies with `pnpm install`.
3. Need to create a local SQlite database with `pnpm db:push`.

   This will apply all migrations at `db/dev.db`.

4. Seed the database with base entities with `pnpm db:seed`
5. You can start coding after starting `pnpm dev`.
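
Taken together, those five steps boil down to the following commands. This is an illustrative sketch, not part of the diffed file; it assumes pnpm and Docker Engine are already installed and you are at the repository root.

```bash
# one-time local setup (commands taken from the steps above)
cp .env.template .env    # then set COOLIFY_APP_ID in .env
pnpm install             # install dependencies
pnpm db:push             # create the local SQLite database and apply migrations (db/dev.db)
pnpm db:seed             # seed the database with base entities
pnpm dev                 # start the development environment
```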

## 🧑💻 Developer contribution

### Technical skills required

- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: Svelte / SvelteKit
- **Database ORM**: Prisma.io
- **Docker Engine**

### Database migrations

During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.

If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>` where `nameOfMigration` is given by you. Make it sense. :)

### Tricky parts

- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!

---

# How to add new services

You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:

- Self-hostable (obviously)
- Open-source
- Maintained (I do not want to add software full of bugs)

## Backend

There are 5 steps you should make on the backend side.

1. Create Prisma / database schema for the new service.
2. Add supported versions of the service.
3. Update global functions.
4. Create API endpoints.
5. Define automatically generated variables.

> I will use [Umami](https://umami.is/) as an example service.

### Create Prisma / database schema for the new service.

You only need to do this if you store passwords or any persistent configuration. Mostly it is required by all services, but there are some exceptions, like NocoDB.

Update Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).

- Add new model with the new service name.
- Make a relationshup with `Service` model.
- In the `Service` model, the name of the new field should be with low-capital.
- If the service needs a database, define a `publicPort` field to be able to make it's database public, example field name in case of PostgreSQL: `postgresqlPublicPort`. It should be a optional field.

If you are finished with the Prisma schema, you should update the database schema with `pnpm db:push` command.

> You must restart the running development environment to be able to use the new model

> If you use VSCode, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running VSCode.

### Add supported versions

Supported versions are hardcoded into Coolify (for now).

You need to update `supportedServiceTypesAndVersions` function at [src/lib/components/common.ts](src/lib/components/common.ts). Example JSON:

```js
{
  // Name used to identify the service internally
  name: 'umami',
  // Fancier name to show to the user
  fancyName: 'Umami',
  // Docker base image for the service
  baseImage: 'ghcr.io/mikecao/umami',
  // Optional: If there is any dependent image, you should list it here
  images: [],
  // Usable tags
  versions: ['postgresql-latest'],
  // Which tag is the recommended
  recommendedVersion: 'postgresql-latest',
  // Application's default port, Umami listens on 3000
  ports: {
    main: 3000
  }
}
```

### Update global functions

1. Add the new service to the `include` variable in [src/lib/database/services.ts](src/lib/database/services.ts), so it will be included in all places in the database queries where it is required.

```js
const include: Prisma.ServiceInclude = {
  destinationDocker: true,
  persistentStorage: true,
  serviceSecret: true,
  minio: true,
  plausibleAnalytics: true,
  vscodeserver: true,
  wordpress: true,
  ghost: true,
  meiliSearch: true,
  umami: true // This line!
};
```

2. Update the database update query with the new service type to `configureServiceType` function in [src/lib/database/services.ts](src/lib/database/services.ts). This function defines the automatically generated variables (passwords, users, etc.) and it's encryption process (if applicable).

```js
[...]
else if (type === 'umami') {
  const postgresqlUser = cuid();
  const postgresqlPassword = encrypt(generatePassword());
  const postgresqlDatabase = 'umami';
  const hashSalt = encrypt(generatePassword(64));
  await prisma.service.update({
    where: { id },
    data: {
      type,
      umami: {
        create: {
          postgresqlDatabase,
          postgresqlPassword,
          postgresqlUser,
          hashSalt,
        }
      }
    }
  });
}
```

3. Add decryption process for configurations and passwords to `getService` function in [src/lib/database/services.ts](src/lib/database/services.ts)

```js
if (body.umami?.postgresqlPassword)
  body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);

if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
```

4. Add service deletion query to `removeService` function in [src/lib/database/services.ts](src/lib/database/services.ts)

### Create API endpoints.

You need to add a new folder under [src/routes/services/[id]](src/routes/services/[id]) with the low-capital name of the service. You need 3 default files in that folder.

#### `index.json.ts`:

It has a POST endpoint that updates the service details in Coolify's database, such as name, url, other configurations, like passwords. It should look something like this:

```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
  const { status, body } = await getUserDetails(event);
  if (status === 401) return { status, body };

  const { id } = event.params;

  let { name, fqdn } = await event.request.json();
  if (fqdn) fqdn = fqdn.toLowerCase();

  try {
    await db.updateService({ id, fqdn, name });
    return { status: 201 };
  } catch (error) {
    return ErrorHandler(error);
  }
};
```

If it's necessary, you can create your own database update function, specifically for the new service.

#### `start.json.ts`

It has a POST endpoint that sets all the required secrets, persistent volumes, `docker-compose.yaml` file and sends a request to the specified docker engine.

You could also define an `HTTP` or `TCP` proxy for every other port that should be proxied to your server. (See `startHttpProxy` and `startTcpProxy` functions in [src/lib/haproxy/index.ts](src/lib/haproxy/index.ts))

#### `stop.json.ts`

It has a POST endpoint that stops the service and all dependent (TCP/HTTP proxies) containers. If publicPort is specified it also needs to cleanup it from the database.

## Frontend

1. You need to add a custom logo at [src/lib/components/svg/services/](src/lib/components/svg/services/) as a svelte component.

   SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.

2. You need to include it the logo at

   - [src/routes/services/index.svelte](src/routes/services/index.svelte) with `isAbsolute` in two places,
   - [src/lib/components/ServiceLinks.svelte](src/lib/components/ServiceLinks.svelte) with `isAbsolute` and a link to the docs/main site of the service
   - [src/routes/services/[id]/configuration/type.svelte](src/routes/services/[id]/configuration/type.svelte) with `isAbsolute`.

3. By default the URL and the name frontend forms are included in [src/routes/services/[id]/\_Services/\_Services.svelte](src/routes/services/[id]/_Services/_Services.svelte).

   If you need to show more details on the frontend, such as users/passwords, you need to add Svelte component to [src/routes/services/[id]/\_Services](src/routes/services/[id]/_Services) with an underscore. For example, see other files in that folder.

   You also need to add the new inputs to the `index.json.ts` file of the specific service, like for MinIO here: [src/routes/services/[id]/minio/index.json.ts](src/routes/services/[id]/minio/index.json.ts)

## 🌐 Translate the project

The project use [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) to translate the project.
It follows the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) to name languages.

### Installation

You must have gone throw all the [intro](#introduction) steps before you can start translating.

It's only an advice, but I recommend you to use:

- Visual Studio Code
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal to see the progress of the translation.
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get the syntax color for the project

### Adding a language

If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the step below:

1. In `src/lib/locales/`, Copy paste `en.json` and rename it with your language (eg: `cz.json`).
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO of your language": "Language",` (eg: `"cz": "Czech",`).
3. Have fun translating!
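
The two file changes from that list can be sketched as a couple of shell commands. This is an illustrative sketch, not part of the diffed file; `cz` stands in for whichever ISO 639-1 code you are adding.

```bash
# 1. copy the English locale as the starting point for the new language
cp src/lib/locales/en.json src/lib/locales/cz.json

# 2. register the language in src/lib/lang.json, e.g. add the line
#      "cz": "Czech",
#    right after the opening bracket, then translate the strings in cz.json
```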

43  Dockerfile  Normal file
@@ -0,0 +1,43 @@
FROM node:18-alpine as build
WORKDIR /app

RUN apk add --no-cache curl
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node

COPY . .
RUN pnpm install
RUN pnpm build

# Production build
FROM node:18-alpine
WORKDIR /app
ENV NODE_ENV production
ARG TARGETPLATFORM

ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
    PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
    PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
    PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
    PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
    PRISMA_CLIENT_ENGINE_TYPE=binary

COPY --from=coollabsio/prisma-engine:3.15 /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/

RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node

RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker

COPY --from=build /app/apps/api/build/ .
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .
COPY --from=build /app/docker-compose.yaml .

RUN pnpm install -p

EXPOSE 3000
CMD pnpm start
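
For local experimentation, an image built from this Dockerfile can be run roughly like this. This is an illustrative sketch, not part of the diff: the image tag and container name are hypothetical, the port mapping comes from the `EXPOSE 3000` line above, and the socket mount mirrors the `DOCKER_ENGINE=/var/run/docker.sock` default shown in the deleted env template earlier in this diff. The supported installation path remains the `install.sh` script shown in the README below.

```bash
# build the image for the local platform (hypothetical tag)
docker build -t coolify-local .

# run it, exposing the UI/API on port 3000 and handing it the host's Docker engine
docker run -d --name coolify-local \
  -p 3000:3000 \
  -v /var/run/docker.sock:/var/run/docker.sock \
  coolify-local
```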

82  README.md
@@ -1,28 +1,56 @@
# Coolify

An open-source, hassle-free, self-hostable Heroku & Netlify alternative.
An open-source & self-hostable Heroku / Netlify alternative.

## Demo
## Live Demo

[Small video](https://cdn.coollabs.io/assets/coolify/video/coolify.webm)
https://demo.coolify.io/

## Installation
(If it is unresponsive, that means someone overloaded the server. 😄)

## Feedback

If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

## How to install

Installation is automated with the following command:

```bash
/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
```

If you would like no questions during installation:

```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
```

For more details goto the [docs](https://docs.coollabs.io/coolify/installation).

## Features

You can deploy any of the following applications, databases and services easily.
ARM support is in beta!

(constantly growing lists)
### Git Sources

You can use the following Git Sources to be auto-deployed to your Coolifyt instance! (Self-hosted versions are also supported.)

- Github
- GitLab
- Bitbucket (WIP)

### Destinations

You can deploy your applications to the following destinations:

- Local Docker Engine
- Remote Docker Engine (WIP)
- Kubernetes (WIP)

### Applications

With Github integration
These are the predefined build packs, but with the Docker build pack, you can host anything that is hostable with a single Dockerfile.

- Static sites
- NodeJS
@@ -30,35 +58,59 @@ With Github integration
- NuxtJS
- NextJS
- React/Preact
- NextJS
- Gatsby
- Svelte
- PHP
- Laravel
- Rust
- or any custom dockerfile
- Docker
- Python
- Deno

### Databases

One-click database is ready to be used internally or shared over the internet:

- MongoDB
- MariaDB
- MySQL
- PostgreSQL
- CouchDB
- Redis

### Services
### One-click services

- [Plausible Analytics](https://plausible.io)
You can host cool open-source services as well:

- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
- [LanguageTool](https://languagetool.org)
- [n8n](https://n8n.io)
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
- [Umami](https://github.com/mikecao/umami)
- [Fider](https://fider.io)
- [Hasura](https://hasura.io)

## Migration from v1

A fresh installation is necessary. v2 and v3 are not compatible with v1.

## Support

- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://discord.com/invite/bvS3WhR)
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)

## Roadmap
## Contribute

[See the Roadmap here](https://github.com/coollabsio/coolify/projects/1)
See [our contribution guide](./CONTRIBUTING.md).

## License

9  apps/api/.eslintignore  Normal file
@@ -0,0 +1,9 @@
seed.js
.DS_Store
node_modules
build
.env
.env.*
!.env.example
dist
dev.db

11  apps/api/.eslintrc  Normal file
@@ -0,0 +1,11 @@
{
  "root": true,
  "parser": "@typescript-eslint/parser",
  "plugins": ["@typescript-eslint", "prettier"],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended",
    "prettier"
  ]
}

11  apps/api/.gitignore  vendored  Normal file
@@ -0,0 +1,11 @@
.DS_Store
node_modules
build
.svelte-kit
package
.env
.env.*
!.env.example
dist
dev.db
client

0  apps/api/.prettierignore  Normal file

0  apps/api/db/.gitkeep  Normal file

7  apps/api/nodemon.json  Normal file
@@ -0,0 +1,7 @@
{
  "watch": ["src"],
  "ignore": ["src/**/*.test.ts"],
  "ext": "ts,mjs,json,graphql",
  "exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\) | grep -v client/` --platform=node --outdir=build --format=cjs && node build",
  "legacyWatch": true
}

67  apps/api/package.json  Normal file
@@ -0,0 +1,67 @@
{
  "name": "coolify-api",
  "description": "Coolify's Fastify API",
  "license": "AGPL-3.0",
  "scripts": {
    "db:push": "prisma db push && prisma generate",
    "db:seed": "prisma db seed",
    "db:studio": "prisma studio",
    "dev": "nodemon",
    "build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
    "format": "prettier --write 'src/**/*.{js,ts,json,md}'",
    "lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
    "start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js"
  },
  "dependencies": {
    "@breejs/ts-worker": "2.0.0",
    "@fastify/autoload": "5.0.0",
    "@fastify/cookie": "7.0.0",
    "@fastify/cors": "8.0.0",
    "@fastify/env": "4.0.0",
    "@fastify/jwt": "6.2.0",
    "@fastify/static": "6.4.0",
    "@iarna/toml": "2.2.5",
    "@prisma/client": "3.15.2",
    "axios": "0.27.2",
    "bcryptjs": "2.4.3",
    "bree": "9.1.1",
    "cabin": "9.1.2",
    "compare-versions": "4.1.3",
    "cuid": "2.1.8",
    "dayjs": "1.11.3",
    "dockerode": "3.3.2",
    "dotenv-extended": "2.9.0",
    "fastify": "4.2.0",
    "fastify-plugin": "3.0.1",
    "generate-password": "1.7.0",
    "get-port": "6.1.2",
    "got": "12.1.0",
    "is-ip": "4.0.0",
    "js-yaml": "4.1.0",
    "jsonwebtoken": "8.5.1",
    "node-forge": "1.3.1",
    "node-os-utils": "1.3.7",
    "p-queue": "7.2.0",
    "strip-ansi": "7.0.1",
    "unique-names-generator": "4.7.1"
  },
  "devDependencies": {
    "@types/node": "18.0.3",
    "@types/node-os-utils": "1.3.0",
    "@typescript-eslint/eslint-plugin": "5.30.5",
    "@typescript-eslint/parser": "5.30.5",
    "esbuild": "0.14.48",
    "eslint": "8.19.0",
    "eslint-config-prettier": "8.5.0",
    "eslint-plugin-prettier": "4.2.1",
    "nodemon": "2.0.19",
    "prettier": "2.7.1",
    "prisma": "3.15.2",
    "rimraf": "3.0.2",
    "tsconfig-paths": "4.0.0",
    "typescript": "4.7.4"
  },
  "prisma": {
    "seed": "node prisma/seed.js"
  }
}
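
Per the `scripts` block above, day-to-day work on the API boils down to a handful of pnpm invocations. This is an illustrative sketch, not part of the diff; it assumes the commands are run from the `apps/api` directory of the pnpm workspace.

```bash
cd apps/api

pnpm db:push   # push the Prisma schema to the local SQLite db and regenerate the client
pnpm db:seed   # seed base entities
pnpm dev       # nodemon + esbuild rebuild loop (see nodemon.json above)
pnpm lint      # prettier --check plus eslint
pnpm build     # bundle src/*.ts into build/ with esbuild
```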
443
apps/api/prisma/migrations/20220131142425_init/migration.sql
Normal file
443
apps/api/prisma/migrations/20220131142425_init/migration.sql
Normal file
@@ -0,0 +1,443 @@
-- CreateTable
CREATE TABLE "Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "User" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "email" TEXT NOT NULL,
    "type" TEXT NOT NULL,
    "password" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "Permission" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "userId" TEXT NOT NULL,
    "teamId" TEXT NOT NULL,
    "permission" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
    CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Team" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT,
    "serviceId" TEXT,
    FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "TeamInvitation" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "uid" TEXT NOT NULL,
    "email" TEXT NOT NULL,
    "teamId" TEXT NOT NULL,
    "teamName" TEXT NOT NULL,
    "permission" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- CreateTable
CREATE TABLE "Application" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "repository" TEXT,
    "configHash" TEXT,
    "branch" TEXT,
    "buildPack" TEXT,
    "projectId" INTEGER,
    "port" INTEGER,
    "installCommand" TEXT,
    "buildCommand" TEXT,
    "startCommand" TEXT,
    "baseDirectory" TEXT,
    "publishDirectory" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Secret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "applicationId" TEXT NOT NULL,
    CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "BuildLog" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT,
    "buildId" TEXT NOT NULL,
    "line" TEXT NOT NULL,
    "time" INTEGER NOT NULL
);

-- CreateTable
CREATE TABLE "Build" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "type" TEXT NOT NULL,
    "applicationId" TEXT,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "githubAppId" TEXT,
    "gitlabAppId" TEXT,
    "commit" TEXT,
    "status" TEXT DEFAULT 'queued',
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "DestinationDocker" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "network" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "engine" TEXT NOT NULL,
    "remoteEngine" BOOLEAN NOT NULL DEFAULT false,
    "isCoolifyProxyUsed" BOOLEAN DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "GitSource" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "type" TEXT,
    "apiUrl" TEXT,
    "htmlUrl" TEXT,
    "organization" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "githubAppId" TEXT,
    "gitlabAppId" TEXT,
    CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "GithubApp" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "appId" INTEGER,
    "installationId" INTEGER,
    "clientId" TEXT,
    "clientSecret" TEXT,
    "webhookSecret" TEXT,
    "privateKey" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "GitlabApp" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "oauthId" INTEGER NOT NULL,
    "groupName" TEXT,
    "deployKeyId" INTEGER,
    "privateSshKey" TEXT,
    "publicSshKey" TEXT,
    "webhookToken" TEXT,
    "appId" TEXT,
    "appSecret" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);

-- CreateTable
CREATE TABLE "Database" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "publicPort" INTEGER,
    "defaultDatabase" TEXT,
    "type" TEXT,
    "version" TEXT,
    "dbUser" TEXT,
    "dbUserPassword" TEXT,
    "rootUser" TEXT,
    "rootUserPassword" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "DatabaseSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "databaseId" TEXT NOT NULL,
    "isPublic" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Service" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "type" TEXT,
    "version" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "PlausibleAnalytics" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "email" TEXT,
    "username" TEXT,
    "password" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "secretKeyBase" TEXT,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Minio" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "rootUser" TEXT NOT NULL,
    "rootUserPassword" TEXT NOT NULL,
    "publicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Vscodeserver" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "password" TEXT NOT NULL,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "Wordpress" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "extraConfig" TEXT,
    "tablePrefix" TEXT,
    "mysqlUser" TEXT NOT NULL,
    "mysqlPassword" TEXT NOT NULL,
    "mysqlRootUser" TEXT NOT NULL,
    "mysqlRootUserPassword" TEXT NOT NULL,
    "mysqlDatabase" TEXT,
    "mysqlPublicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_TeamToUser" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_ApplicationToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_GitSourceToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_GithubAppToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_GitlabAppToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_DestinationDockerToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_DatabaseToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateTable
CREATE TABLE "_ServiceToTeam" (
    "A" TEXT NOT NULL,
    "B" TEXT NOT NULL,
    FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
    FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");

-- CreateIndex
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");

-- CreateIndex
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");

-- CreateIndex
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");

-- CreateIndex
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");

-- CreateIndex
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");

-- CreateIndex
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");

-- CreateIndex
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");

-- CreateIndex
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");

-- CreateIndex
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");

-- CreateIndex
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");

-- CreateIndex
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");

-- CreateIndex
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");

-- CreateIndex
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");

-- CreateIndex
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");

-- CreateIndex
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");

-- CreateIndex
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
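For orientation, a minimal sketch (not part of the diff) of how the tables created by this init migration could be inspected through the generated Prisma client, assuming the client has been generated and COOLIFY_DATABASE_URL points at the SQLite file:

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Lists the tables the init migration created (Setting, User, Team, ... plus the _XToTeam join tables).
async function listTables() {
  const tables = await prisma.$queryRaw<{ name: string }[]>`SELECT name FROM sqlite_master WHERE type = 'table' ORDER BY name`;
  console.log(tables.map((t) => t.name));
  await prisma.$disconnect();
}

listTables();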
@@ -0,0 +1,28 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Team" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT,
    "serviceId" TEXT
);
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
DROP TABLE "Team";
ALTER TABLE "new_Team" RENAME TO "Team";
CREATE TABLE "new_DatabaseSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "databaseId" TEXT NOT NULL,
    "isPublic" BOOLEAN NOT NULL DEFAULT false,
    "appendOnly" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
DROP TABLE "DatabaseSettings";
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,11 @@
/*
  Warnings:

  - A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.

*/
-- DropIndex
DROP INDEX "Secret_name_key";

-- CreateIndex
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");

@@ -0,0 +1,47 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
CREATE TABLE "new_Service" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "type" TEXT,
    "version" TEXT,
    "destinationDockerId" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
DROP TABLE "Service";
ALTER TABLE "new_Service" RENAME TO "Service";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Secret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "isPRMRSecret" BOOLEAN NOT NULL DEFAULT false,
    "isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "applicationId" TEXT NOT NULL,
    CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Secret" ("applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value") SELECT "applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value" FROM "Secret";
DROP TABLE "Secret";
ALTER TABLE "new_Secret" RENAME TO "Secret";
CREATE UNIQUE INDEX "Secret_name_applicationId_isPRMRSecret_key" ON "Secret"("name", "applicationId", "isPRMRSecret");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,20 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "proxyHash" TEXT;

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "ServiceSecret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "serviceId" TEXT NOT NULL,
    CONSTRAINT "ServiceSecret_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "ServiceSecret_name_serviceId_key" ON "ServiceSecret"("name", "serviceId");

@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "autodeploy" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "phpModules" TEXT;

@@ -0,0 +1,18 @@
-- CreateTable
CREATE TABLE "ApplicationPersistentStorage" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "path" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_key" ON "ApplicationPersistentStorage"("applicationId");

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationPersistentStorage_path_key" ON "ApplicationPersistentStorage"("path");

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");

@@ -0,0 +1,19 @@
-- CreateTable
CREATE TABLE "Ghost" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "defaultEmail" TEXT NOT NULL,
    "defaultPassword" TEXT NOT NULL,
    "mariadbUser" TEXT NOT NULL,
    "mariadbPassword" TEXT NOT NULL,
    "mariadbRootUser" TEXT NOT NULL,
    "mariadbRootUserPassword" TEXT NOT NULL,
    "mariadbDatabase" TEXT,
    "mariadbPublicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Ghost_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Ghost_serviceId_key" ON "Ghost"("serviceId");

@@ -0,0 +1,4 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "pythonModule" TEXT;
ALTER TABLE "Application" ADD COLUMN "pythonVariable" TEXT;
ALTER TABLE "Application" ADD COLUMN "pythonWSGI" TEXT;

@@ -0,0 +1,12 @@
-- CreateTable
CREATE TABLE "MeiliSearch" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "masterKey" TEXT NOT NULL,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "MeiliSearch_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "MeiliSearch_serviceId_key" ON "MeiliSearch"("serviceId");

@@ -0,0 +1,29 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Wordpress" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "extraConfig" TEXT,
    "tablePrefix" TEXT,
    "mysqlUser" TEXT NOT NULL,
    "mysqlPassword" TEXT NOT NULL,
    "mysqlRootUser" TEXT NOT NULL,
    "mysqlRootUserPassword" TEXT NOT NULL,
    "mysqlDatabase" TEXT,
    "mysqlPublicPort" INTEGER,
    "ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
    "ftpUser" TEXT,
    "ftpPassword" TEXT,
    "ftpPublicPort" INTEGER,
    "ftpHostKey" TEXT,
    "ftpHostKeyPrivate" TEXT,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
DROP TABLE "Wordpress";
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,5 @@
-- DropIndex
DROP INDEX "ApplicationPersistentStorage_path_key";

-- DropIndex
DROP INDEX "ApplicationPersistentStorage_applicationId_key";

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "exposePort" INTEGER;

@@ -0,0 +1,12 @@
-- CreateTable
CREATE TABLE "ServicePersistentStorage" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "serviceId" TEXT NOT NULL,
    "path" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ServicePersistentStorage_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_path_key" ON "ServicePersistentStorage"("serviceId", "path");

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerFileLocation" TEXT;

@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "denoMainFile" TEXT;
ALTER TABLE "Application" ADD COLUMN "denoOptions" TEXT;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Build" ADD COLUMN "branch" TEXT;

@@ -0,0 +1,17 @@
-- CreateTable
CREATE TABLE "Umami" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "serviceId" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "umamiAdminPassword" TEXT NOT NULL,
    "hashSalt" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Umami_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Umami_serviceId_key" ON "Umami"("serviceId");

@@ -0,0 +1,22 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "proxyHash" TEXT,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "baseBuildImage" TEXT;
ALTER TABLE "Application" ADD COLUMN "baseImage" TEXT;

@@ -0,0 +1,16 @@
-- CreateTable
CREATE TABLE "Hasura" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "serviceId" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "graphQLAdminPassword" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Hasura_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Hasura_serviceId_key" ON "Hasura"("serviceId");

@@ -0,0 +1,25 @@
-- CreateTable
CREATE TABLE "Fider" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "serviceId" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "jwtSecret" TEXT NOT NULL,
    "emailNoreply" TEXT,
    "emailMailgunApiKey" TEXT,
    "emailMailgunDomain" TEXT,
    "emailMailgunRegion" TEXT,
    "emailSmtpHost" TEXT,
    "emailSmtpPort" INTEGER,
    "emailSmtpUser" TEXT,
    "emailSmtpPassword" TEXT,
    "emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");

@@ -0,0 +1,29 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Fider" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "serviceId" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "jwtSecret" TEXT NOT NULL,
    "emailNoreply" TEXT,
    "emailMailgunApiKey" TEXT,
    "emailMailgunDomain" TEXT,
    "emailMailgunRegion" TEXT NOT NULL DEFAULT 'EU',
    "emailSmtpHost" TEXT,
    "emailSmtpPort" INTEGER,
    "emailSmtpUser" TEXT,
    "emailSmtpPassword" TEXT,
    "emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Fider" ("createdAt", "emailMailgunApiKey", "emailMailgunDomain", "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt") SELECT "createdAt", "emailMailgunApiKey", "emailMailgunDomain", coalesce("emailMailgunRegion", 'EU') AS "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt" FROM "Fider";
DROP TABLE "Fider";
ALTER TABLE "new_Fider" RENAME TO "Fider";
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,23 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "proxyHash" TEXT,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Service" ADD COLUMN "exposePort" INTEGER;

@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PlausibleAnalytics" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "email" TEXT,
    "username" TEXT,
    "password" TEXT NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "secretKeyBase" TEXT,
    "scriptName" TEXT NOT NULL DEFAULT 'plausible.js',
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_PlausibleAnalytics" ("createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username") SELECT "createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username" FROM "PlausibleAnalytics";
DROP TABLE "PlausibleAnalytics";
ALTER TABLE "new_PlausibleAnalytics" RENAME TO "PlausibleAnalytics";
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,32 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Wordpress" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "extraConfig" TEXT,
    "tablePrefix" TEXT,
    "ownMysql" BOOLEAN NOT NULL DEFAULT false,
    "mysqlHost" TEXT,
    "mysqlPort" INTEGER,
    "mysqlUser" TEXT NOT NULL,
    "mysqlPassword" TEXT NOT NULL,
    "mysqlRootUser" TEXT NOT NULL,
    "mysqlRootUserPassword" TEXT NOT NULL,
    "mysqlDatabase" TEXT,
    "mysqlPublicPort" INTEGER,
    "ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
    "ftpUser" TEXT,
    "ftpPassword" TEXT,
    "ftpPublicPort" INTEGER,
    "ftpHostKey" TEXT,
    "ftpHostKeyPrivate" TEXT,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
DROP TABLE "Wordpress";
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "proxyHash" TEXT,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Minio" ADD COLUMN "apiFqdn" TEXT;
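Most of the hunks above follow Prisma's SQLite "RedefineTables" pattern: because SQLite cannot add constraints or change column definitions in place, each migration creates a new_X table with the extended shape, copies the rows over, drops the old table and renames the new one. A minimal sketch (not part of the diff, assuming the generated Prisma client) of reading the Setting flags these migrations introduce, which fall back to their DEFAULT values for existing rows:

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function showSettingFlags() {
  // dualCerts, minPort/maxPort, isAutoUpdateEnabled, isDNSCheckEnabled and isTraefikUsed
  // were all added incrementally by the migrations above.
  const settings = await prisma.setting.findFirst();
  console.log(settings?.isTraefikUsed, settings?.isDNSCheckEnabled, settings?.isAutoUpdateEnabled);
  await prisma.$disconnect();
}

showSettingFlags();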
3
apps/api/prisma/migrations/migration_lock.toml
Normal file
@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "sqlite"
451
apps/api/prisma/schema.prisma
Normal file
@@ -0,0 +1,451 @@
generator client {
  provider = "prisma-client-js"
  binaryTargets = ["native", "linux-musl"]
}

datasource db {
  provider = "sqlite"
  url = env("COOLIFY_DATABASE_URL")
}

model Setting {
  id String @id @default(cuid())
  fqdn String? @unique
  isRegistrationEnabled Boolean @default(false)
  dualCerts Boolean @default(false)
  minPort Int @default(9000)
  maxPort Int @default(9100)
  proxyPassword String
  proxyUser String
  proxyHash String?
  isAutoUpdateEnabled Boolean @default(false)
  isDNSCheckEnabled Boolean @default(true)
  isTraefikUsed Boolean @default(true)
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model User {
  id String @id @unique @default(cuid())
  email String @unique
  type String
  password String?
  teams Team[]
  permission Permission[]
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Permission {
  id String @id @default(cuid())
  user User @relation(fields: [userId], references: [id])
  userId String
  team Team @relation(fields: [teamId], references: [id])
  teamId String
  permission String
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Team {
  id String @id @default(cuid())
  users User[]
  name String?
  applications Application[]
  gitSources GitSource[]
  gitHubApps GithubApp[]
  gitLabApps GitlabApp[]
  destinationDocker DestinationDocker[]
  permissions Permission[]
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  database Database[] @relation(references: [id])
  databaseId String?
  service Service[] @relation(references: [id])
  serviceId String?
}

model TeamInvitation {
  id String @id @default(cuid())
  uid String
  email String
  teamId String
  teamName String
  permission String
  createdAt DateTime @default(now())
}

model Application {
  id String @id @default(cuid())
  name String
  fqdn String? @unique
  repository String?
  configHash String?
  branch String?
  buildPack String?
  projectId Int?
  port Int?
  exposePort Int?
  installCommand String?
  buildCommand String?
  startCommand String?
  baseDirectory String?
  publishDirectory String?
  phpModules String?
  pythonWSGI String?
  pythonModule String?
  pythonVariable String?
  dockerFileLocation String?
  denoMainFile String?
  denoOptions String?
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  settings ApplicationSettings?
  teams Team[]
  destinationDockerId String?
  destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  gitSourceId String?
  gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
  secrets Secret[]
  persistentStorage ApplicationPersistentStorage[]
  baseImage String?
  baseBuildImage String?
}

model ApplicationSettings {
  id String @id @default(cuid())
  application Application @relation(fields: [applicationId], references: [id])
  applicationId String @unique
  dualCerts Boolean @default(false)
  debug Boolean @default(false)
  previews Boolean @default(false)
  autodeploy Boolean @default(true)
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model ApplicationPersistentStorage {
  id String @id @default(cuid())
  application Application @relation(fields: [applicationId], references: [id])
  applicationId String
  path String
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@unique([applicationId, path])
}

model ServicePersistentStorage {
  id String @id @default(cuid())
  service Service @relation(fields: [serviceId], references: [id])
  serviceId String
  path String
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  @@unique([serviceId, path])
}

model Secret {
  id String @id @default(cuid())
  name String
  value String
  isPRMRSecret Boolean @default(false)
  isBuildSecret Boolean @default(false)
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  application Application @relation(fields: [applicationId], references: [id])
  applicationId String

  @@unique([name, applicationId, isPRMRSecret])
}

model ServiceSecret {
  id String @id @default(cuid())
  name String
  value String
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  service Service @relation(fields: [serviceId], references: [id])
  serviceId String

  @@unique([name, serviceId])
}

model BuildLog {
  id String @id @default(cuid())
  applicationId String?
  buildId String
  line String
  time Int
}

model Build {
  id String @id @default(cuid())
  type String
  applicationId String?
  destinationDockerId String?
  gitSourceId String?
  githubAppId String?
  gitlabAppId String?
  commit String?
  branch String?
  status String? @default("queued")
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model DestinationDocker {
  id String @id @default(cuid())
  network String @unique
  name String
  engine String
  remoteEngine Boolean @default(false)
  isCoolifyProxyUsed Boolean? @default(false)
  teams Team[]
  application Application[]
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  database Database[]
  service Service[]
}

model GitSource {
  id String @id @default(cuid())
  name String
  teams Team[]
  type String?
  apiUrl String?
  htmlUrl String?
  organization String?
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  githubAppId String? @unique
  githubApp GithubApp? @relation(fields: [githubAppId], references: [id])
  application Application[]
  gitlabAppId String? @unique
  gitlabApp GitlabApp? @relation(fields: [gitlabAppId], references: [id])
}

model GithubApp {
  id String @id @default(cuid())
  name String? @unique
  teams Team[]
  appId Int?
  installationId Int?
  clientId String?
  clientSecret String?
  webhookSecret String?
  privateKey String?
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  gitSource GitSource?
}

model GitlabApp {
  id String @id @default(cuid())
  oauthId Int @unique
  groupName String? @unique
  teams Team[]
  deployKeyId Int?
  privateSshKey String?
  publicSshKey String?
  webhookToken String?
  appId String?
  appSecret String?
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  gitSource GitSource?
}

model Database {
  id String @id @default(cuid())
  name String
  publicPort Int?
  defaultDatabase String?
  type String?
  version String?
  dbUser String?
  dbUserPassword String?
  rootUser String?
  rootUserPassword String?
  settings DatabaseSettings?
  destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  destinationDockerId String?
  teams Team[]
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model DatabaseSettings {
  id String @id @default(cuid())
  database Database @relation(fields: [databaseId], references: [id])
  databaseId String @unique
  isPublic Boolean @default(false)
  appendOnly Boolean @default(true)
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Service {
  id String @id @default(cuid())
  name String
  fqdn String?
  exposePort Int?
  dualCerts Boolean @default(false)
  type String?
  version String?
  teams Team[]
  destinationDockerId String?
  destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  plausibleAnalytics PlausibleAnalytics?
  minio Minio?
  vscodeserver Vscodeserver?
  wordpress Wordpress?
  ghost Ghost?
  serviceSecret ServiceSecret[]
  meiliSearch MeiliSearch?
  persistentStorage ServicePersistentStorage[]
  umami Umami?
  hasura Hasura?
  fider Fider?
}

model PlausibleAnalytics {
  id String @id @default(cuid())
  email String?
  username String?
  password String
  postgresqlUser String
  postgresqlPassword String
  postgresqlDatabase String
  postgresqlPublicPort Int?
  secretKeyBase String?
  scriptName String @default("plausible.js")
  serviceId String @unique
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Minio {
  id String @id @default(cuid())
  rootUser String
  rootUserPassword String
  publicPort Int?
  apiFqdn String?
  serviceId String @unique
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Vscodeserver {
  id String @id @default(cuid())
  password String
  serviceId String @unique
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Wordpress {
  id String @id @default(cuid())
  extraConfig String?
  tablePrefix String?
  ownMysql Boolean @default(false)
  mysqlHost String?
  mysqlPort Int?
  mysqlUser String
  mysqlPassword String
  mysqlRootUser String
  mysqlRootUserPassword String
  mysqlDatabase String?
  mysqlPublicPort Int?
  ftpEnabled Boolean @default(false)
  ftpUser String?
  ftpPassword String?
  ftpPublicPort Int?
  ftpHostKey String?
  ftpHostKeyPrivate String?
  serviceId String @unique
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Ghost {
  id String @id @default(cuid())
  defaultEmail String
  defaultPassword String
  mariadbUser String
  mariadbPassword String
  mariadbRootUser String
  mariadbRootUserPassword String
  mariadbDatabase String?
  mariadbPublicPort Int?
  serviceId String @unique
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model MeiliSearch {
  id String @id @default(cuid())
  masterKey String
  serviceId String @unique
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Umami {
  id String @id @default(cuid())
  serviceId String @unique
  postgresqlUser String
  postgresqlPassword String
  postgresqlDatabase String
  postgresqlPublicPort Int?
  umamiAdminPassword String
  hashSalt String
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Hasura {
  id String @id @default(cuid())
  serviceId String @unique
  postgresqlUser String
  postgresqlPassword String
  postgresqlDatabase String
  postgresqlPublicPort Int?
  graphQLAdminPassword String
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}

model Fider {
  id String @id @default(cuid())
  serviceId String @unique
  postgresqlUser String
  postgresqlPassword String
  postgresqlDatabase String
  postgresqlPublicPort Int?
  jwtSecret String
  emailNoreply String?
  emailMailgunApiKey String?
  emailMailgunDomain String?
  emailMailgunRegion String @default("EU")
  emailSmtpHost String?
  emailSmtpPort Int?
  emailSmtpUser String?
  emailSmtpPassword String?
  emailSmtpEnableStartTls Boolean @default(false)
  service Service @relation(fields: [serviceId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
}
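A minimal usage sketch against this schema (not part of the diff, assuming the client has been generated from it): creating a user together with a personal team through the implicit many-to-many relation backed by the _TeamToUser join table.

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

async function register(email: string) {
  // `teams: { create: ... }` writes both the Team row and the join-table entry.
  return prisma.user.create({
    data: {
      email,
      type: 'email',
      teams: { create: { name: 'Personal team' } }
    },
    include: { teams: true }
  });
}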
88
apps/api/prisma/seed.js
Normal file
@@ -0,0 +1,88 @@
const dotEnvExtended = require('dotenv-extended');
dotEnvExtended.load();
const crypto = require('crypto');
const generator = require('generate-password');
const cuid = require('cuid');
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();

function generatePassword(length = 24) {
  return generator.generate({
    length,
    numbers: true,
    strict: true
  });
}
const algorithm = 'aes-256-ctr';

async function main() {
  // Enable registration for the first user
  // Set initial HAProxy password
  const settingsFound = await prisma.setting.findFirst({});
  if (!settingsFound) {
    await prisma.setting.create({
      data: {
        isRegistrationEnabled: true,
        proxyPassword: encrypt(generatePassword()),
        proxyUser: cuid()
      }
    });
  } else {
    await prisma.setting.update({
      where: {
        id: settingsFound.id
      },
      data: {
        isTraefikUsed: true,
        proxyHash: null
      }
    });
  }
  const localDocker = await prisma.destinationDocker.findFirst({
    where: { engine: '/var/run/docker.sock' }
  });
  if (!localDocker) {
    await prisma.destinationDocker.create({
      data: {
        engine: '/var/run/docker.sock',
        name: 'Local Docker',
        isCoolifyProxyUsed: true,
        network: 'coolify'
      }
    });
  }

  // Set auto-update based on env variable
  const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
  const settings = await prisma.setting.findFirst({});
  if (settings) {
    await prisma.setting.update({
      where: {
        id: settings.id
      },
      data: {
        isAutoUpdateEnabled
      }
    });
  }
}
main()
  .catch((e) => {
    console.error(e);
    process.exit(1);
  })
  .finally(async () => {
    await prisma.$disconnect();
  });

const encrypt = (text) => {
  if (text) {
    const iv = crypto.randomBytes(16);
    const cipher = crypto.createCipheriv(algorithm, process.env['COOLIFY_SECRET_KEY'], iv);
    const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);
    return JSON.stringify({
      iv: iv.toString('hex'),
      content: encrypted.toString('hex')
    });
  }
};
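The seed only ever encrypts; a matching decrypt helper is not part of this diff, but under the same assumptions (aes-256-ctr, 32-byte key in COOLIFY_SECRET_KEY, the iv/content JSON produced above) a rough sketch would look like this:

import crypto from 'crypto';

const algorithm = 'aes-256-ctr';

function decrypt(payload: string): string {
  // payload is the JSON string produced by encrypt(): { iv, content } as hex.
  const { iv, content } = JSON.parse(payload);
  const decipher = crypto.createDecipheriv(
    algorithm,
    process.env['COOLIFY_SECRET_KEY'] as string,
    Buffer.from(iv, 'hex')
  );
  return Buffer.concat([decipher.update(Buffer.from(content, 'hex')), decipher.final()]).toString();
}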
132
apps/api/src/index.ts
Normal file
@@ -0,0 +1,132 @@
import Fastify from 'fastify';
import cors from '@fastify/cors';
import serve from '@fastify/static';
import env from '@fastify/env';
import cookie from '@fastify/cookie';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import { asyncExecShell, isDev, prisma } from './lib/common';
import { scheduler } from './lib/scheduler';

declare module 'fastify' {
  interface FastifyInstance {
    config: {
      COOLIFY_APP_ID: string,
      COOLIFY_SECRET_KEY: string,
      COOLIFY_DATABASE_URL: string,
      COOLIFY_SENTRY_DSN: string,
      COOLIFY_IS_ON: string,
      COOLIFY_WHITE_LABELED: boolean,
      COOLIFY_WHITE_LABELED_ICON: string | null,
      COOLIFY_AUTO_UPDATE: boolean,
    };
  }
}

const port = isDev ? 3001 : 3000;
const host = '0.0.0.0';
const fastify = Fastify({
  logger: false
});
const schema = {
  type: 'object',
  required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
  properties: {
    COOLIFY_APP_ID: {
      type: 'string',
    },
    COOLIFY_SECRET_KEY: {
      type: 'string',
    },
    COOLIFY_DATABASE_URL: {
      type: 'string',
      default: 'file:../db/dev.db'
    },
    COOLIFY_SENTRY_DSN: {
      type: 'string',
      default: null
    },
    COOLIFY_IS_ON: {
      type: 'string',
      default: 'docker'
    },
    COOLIFY_WHITE_LABELED: {
      type: 'boolean',
      default: false
    },
    COOLIFY_WHITE_LABELED_ICON: {
      type: 'string',
      default: null
    },
    COOLIFY_AUTO_UPDATE: {
      type: 'boolean',
      default: false
    },
  }
};

const options = {
  schema
};
fastify.register(env, options);
if (!isDev) {
  fastify.register(serve, {
    root: path.join(__dirname, './public'),
    preCompressed: true
  });
  fastify.setNotFoundHandler(function (request, reply) {
    if (request.raw.url && request.raw.url.startsWith('/api')) {
      return reply.status(404).send({
        success: false
      });
    }
    return reply.status(200).sendFile('index.html');
  });
}
fastify.register(autoLoad, {
  dir: join(__dirname, 'plugins')
});
fastify.register(autoLoad, {
  dir: join(__dirname, 'routes')
});

fastify.register(cookie)
fastify.register(cors);
fastify.listen({ port, host }, async (err: any, address: any) => {
  if (err) {
    console.error(err);
    process.exit(1);
  }
  console.log(`Coolify's API is listening on ${host}:${port}`);
  await initServer()
  await scheduler.start('deployApplication');
  await scheduler.start('cleanupStorage');
  await scheduler.start('checkProxies')

  // Check if no build is running, try to autoupdate.
  setInterval(async () => {
    const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
    if (isAutoUpdateEnabled) {
      if (scheduler.workers.has('deployApplication')) {
        scheduler.workers.get('deployApplication').postMessage("status");
      }
    }
  }, 30000 * 10)

  scheduler.on('worker deleted', async (name) => {
    if (name === 'autoUpdater') {
      await scheduler.start('deployApplication');
    }
  });

});

async function initServer() {
  try {
    await asyncExecShell(`docker network create --attachable coolify`);
  } catch (error) { }
|
||||
}
|
||||
|
||||
|
||||
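The two autoLoad registrations above wire in everything under src/plugins and src/routes automatically. For orientation, a route module picked up by @fastify/autoload is simply a file whose default export receives the Fastify instance; the sketch below is a hypothetical routes/healthcheck/index.ts, not a file in this changeset.

import type { FastifyPluginAsync } from 'fastify';

// Hypothetical routes/healthcheck/index.ts: autoload mounts it under /healthcheck.
const healthcheck: FastifyPluginAsync = async (fastify) => {
  fastify.get('/', async () => {
    return { success: true };
  });
};

export default healthcheck;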
43  apps/api/src/jobs/autoUpdater.ts  Normal file
@@ -0,0 +1,43 @@
import axios from 'axios';
import compareVersions from 'compare-versions';
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, asyncSleep, isDev, prisma, version } from '../lib/common';

(async () => {
  if (parentPort) {
    try {
      const currentVersion = version;
      const { data: versions } = await axios.get(`https://get.coollabs.io/versions.json`, {
        params: {
          appId: process.env['COOLIFY_APP_ID'] || undefined,
          version: currentVersion
        }
      });
      const latestVersion = versions['coolify'].main.version;
      const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
      if (isUpdateAvailable === 1) {
        const activeCount = 0;
        if (activeCount === 0) {
          if (!isDev) {
            console.log(`Updating Coolify to ${latestVersion}.`);
            await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
            await asyncExecShell(`env | grep COOLIFY > .env`);
            await asyncExecShell(
              `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
            );
          } else {
            console.log('Updating (not really in dev mode).');
          }
        }
      }
    } catch (error) {
      console.log(error);
    } finally {
      await prisma.$disconnect();
    }
  } else process.exit(0);
})();
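The update decision above relies on compare-versions returning 1 only when its first argument is strictly newer than the second. A quick editorial sketch of that contract:

import compareVersions from 'compare-versions';

// compareVersions(a, b) returns 1 if a > b, 0 if a === b, -1 if a < b.
console.log(compareVersions('3.1.0', '3.0.4')); //  1  -> update available
console.log(compareVersions('3.0.4', '3.0.4')); //  0  -> already up to date
console.log(compareVersions('2.9.9', '3.0.4')); // -1  -> running ahead of the published version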
88  apps/api/src/jobs/checkProxies.ts  Normal file
@@ -0,0 +1,88 @@
import { parentPort } from 'node:worker_threads';
import { prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, asyncExecShell } from '../lib/common';
import { checkContainer, getEngine } from '../lib/docker';

(async () => {
  if (parentPort) {
    // Coolify Proxy
    const engine = '/var/run/docker.sock';
    const localDocker = await prisma.destinationDocker.findFirst({
      where: { engine, network: 'coolify' }
    });
    if (localDocker && localDocker.isCoolifyProxyUsed) {
      // Remove HAProxy
      const found = await checkContainer(engine, 'coolify-haproxy');
      const host = getEngine(engine);
      if (found) {
        await asyncExecShell(
          `DOCKER_HOST="${host}" docker stop -t 0 coolify-haproxy && docker rm coolify-haproxy`
        );
      }
      await startTraefikProxy(engine);
    }

    // TCP Proxies
    const databasesWithPublicPort = await prisma.database.findMany({
      where: { publicPort: { not: null } },
      include: { settings: true, destinationDocker: true }
    });
    for (const database of databasesWithPublicPort) {
      const { destinationDockerId, destinationDocker, publicPort, id } = database;
      if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
        const { privatePort } = generateDatabaseConfiguration(database);
        // Remove HAProxy
        const found = await checkContainer(engine, `haproxy-for-${publicPort}`);
        const host = getEngine(engine);
        if (found) {
          await asyncExecShell(
            `DOCKER_HOST="${host}" docker stop -t 0 haproxy-for-${publicPort} && docker rm haproxy-for-${publicPort}`
          );
        }
        await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
      }
    }
    const wordpressWithFtp = await prisma.wordpress.findMany({
      where: { ftpPublicPort: { not: null } },
      include: { service: { include: { destinationDocker: true } } }
    });
    for (const ftp of wordpressWithFtp) {
      const { service, ftpPublicPort } = ftp;
      const { destinationDockerId, destinationDocker, id } = service;
      if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
        // Remove HAProxy
        const found = await checkContainer(engine, `haproxy-for-${ftpPublicPort}`);
        const host = getEngine(engine);
        if (found) {
          await asyncExecShell(
            `DOCKER_HOST="${host}" docker stop -t 0 haproxy-for-${ftpPublicPort} && docker rm haproxy-for-${ftpPublicPort}`
          );
        }
        await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
      }
    }

    // HTTP Proxies
    const minioInstances = await prisma.minio.findMany({
      where: { publicPort: { not: null } },
      include: { service: { include: { destinationDocker: true } } }
    });
    for (const minio of minioInstances) {
      const { service, publicPort } = minio;
      const { destinationDockerId, destinationDocker, id } = service;
      if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
        // Remove HAProxy
        const found = await checkContainer(engine, `${id}-${publicPort}`);
        const host = getEngine(engine);
        if (found) {
          await asyncExecShell(
            `DOCKER_HOST="${host}" docker stop -t 0 ${id}-${publicPort} && docker rm ${id}-${publicPort}`
          );
        }
        await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
      }
    }
    await prisma.$disconnect();
  } else process.exit(0);
})();
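checkContainer and getEngine are imported from ../lib/docker and are not shown in this hunk. Purely for orientation, a helper playing checkContainer's apparent role could be sketched as below; the name, signature and behaviour are assumptions for illustration, not the repository's implementation.

import { exec } from 'node:child_process';
import { promisify } from 'node:util';

const execAsync = promisify(exec);

// Hypothetical sketch: is a container with this name currently running on the given Docker host?
async function isContainerRunning(dockerHost: string, name: string): Promise<boolean> {
  try {
    const { stdout } = await execAsync(
      `DOCKER_HOST="${dockerHost}" docker inspect --format '{{json .State.Running}}' ${name}`
    );
    return stdout.trim() === 'true';
  } catch {
    // docker inspect exits non-zero when the container does not exist
    return false;
  }
}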
90  apps/api/src/jobs/cleanupStorage.ts  Normal file
@@ -0,0 +1,90 @@
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, isDev, prisma, version } from '../lib/common';
import { getEngine } from '../lib/docker';

(async () => {
  if (parentPort) {
    const destinationDockers = await prisma.destinationDocker.findMany();
    const engines = [...new Set(destinationDockers.map(({ engine }) => engine))];
    for (const engine of engines) {
      let lowDiskSpace = false;
      const host = getEngine(engine);
      // try {
      //   let stdout = null
      //   if (!isDev) {
      //     const output = await asyncExecShell(
      //       `DOCKER_HOST=${host} docker exec coolify sh -c 'df -kPT /'`
      //     );
      //     stdout = output.stdout;
      //   } else {
      //     const output = await asyncExecShell(`df -kPT /`);
      //     stdout = output.stdout;
      //   }
      //   let lines = stdout.trim().split('\n');
      //   let header = lines[0];
      //   let regex =
      //     /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
      //   const boundaries = [];
      //   let match;
      //
      //   while ((match = regex.exec(header))) {
      //     boundaries.push(match[0].length);
      //   }
      //
      //   boundaries[boundaries.length - 1] = -1;
      //   const data = lines.slice(1).map((line) => {
      //     const cl = boundaries.map((boundary) => {
      //       const column = boundary > 0 ? line.slice(0, boundary) : line;
      //       line = line.slice(boundary);
      //       return column.trim();
      //     });
      //     return {
      //       capacity: Number.parseInt(cl[5], 10) / 100
      //     };
      //   });
      //   if (data.length > 0) {
      //     const { capacity } = data[0];
      //     if (capacity > 0.6) {
      //       lowDiskSpace = true;
      //     }
      //   }
      // } catch (error) {
      //   console.log(error);
      // }
      if (!isDev) {
        // Cleanup old coolify images
        try {
          let { stdout: images } = await asyncExecShell(
            `DOCKER_HOST=${host} docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs`
          );
          images = images.trim();
          if (images) {
            await asyncExecShell(`DOCKER_HOST=${host} docker rmi -f ${images}`);
          }
        } catch (error) {
          //console.log(error);
        }
        try {
          await asyncExecShell(`DOCKER_HOST=${host} docker container prune -f`);
        } catch (error) {
          //console.log(error);
        }
        try {
          await asyncExecShell(`DOCKER_HOST=${host} docker image prune -f`);
        } catch (error) {
          //console.log(error);
        }
        try {
          await asyncExecShell(`DOCKER_HOST=${host} docker image prune -a -f`);
        } catch (error) {
          //console.log(error);
        }
      } else {
        console.log(`[DEV MODE] Low disk space: ${lowDiskSpace}`);
      }
    }
    await prisma.$disconnect();
  } else process.exit(0);
})();
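The commented-out block above derives a capacity ratio by slicing `df -kPT /` output at header column boundaries. The same signal can be read far more directly; a minimal editorial sketch, reusing the 60% threshold from the disabled code:

import { exec } from 'node:child_process';
import { promisify } from 'node:util';

const execAsync = promisify(exec);

// True when the root filesystem is more than 60% full (threshold taken from the disabled code above).
async function isLowOnDiskSpace(): Promise<boolean> {
  const { stdout } = await execAsync(`df -kP / | tail -1 | awk '{print $5}'`);
  const capacity = Number.parseInt(stdout.trim().replace('%', ''), 10) / 100;
  return capacity > 0.6;
}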
352  apps/api/src/jobs/deployApplication.ts  Normal file
@@ -0,0 +1,352 @@
import { parentPort } from 'node:worker_threads';
import crypto from 'crypto';
import fs from 'fs/promises';
import yaml from 'js-yaml';

import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
import { asyncExecShell, createDirectories, decrypt, getDomain, prisma } from '../lib/common';
import { dockerInstance, getEngine } from '../lib/docker';
import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks';

(async () => {
  if (parentPort) {
    const concurrency = 1;
    const PQueue = await import('p-queue');
    const queue = new PQueue.default({ concurrency });
    parentPort.on('message', async (message) => {
      if (parentPort) {
        if (message === 'error') throw new Error('oops');
        if (message === 'cancel') {
          parentPort.postMessage('cancelled');
          return;
        }
        if (message === 'status') {
          parentPort.postMessage({ size: queue.size, pending: queue.pending });
          return;
        }

        await queue.add(async () => {
          const {
            id: applicationId, repository, name, destinationDocker, destinationDockerId,
            gitSource, build_id: buildId, configHash, fqdn, projectId, secrets, phpModules,
            type, pullmergeRequestId = null, sourceBranch = null, settings, persistentStorage,
            pythonWSGI, pythonModule, pythonVariable, denoOptions, exposePort,
            baseImage, baseBuildImage
          } = message;
          let {
            branch, buildPack, port, installCommand, buildCommand, startCommand,
            baseDirectory, publishDirectory, dockerFileLocation, denoMainFile
          } = message;
          try {
            const { debug } = settings;
            if (concurrency === 1) {
              await prisma.build.updateMany({
                where: {
                  status: { in: ['queued', 'running'] },
                  id: { not: buildId },
                  applicationId,
                  createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
                },
                data: { status: 'failed' }
              });
            }
            let imageId = applicationId;
            let domain = getDomain(fqdn);
            const volumes =
              persistentStorage?.map((storage) => {
                return `${applicationId}${storage.path.replace(/\//gi, '-')}:${
                  buildPack !== 'docker' ? '/app' : ''
                }${storage.path}`;
              }) || [];
            // Previews, we need to get the source branch and set subdomain
            if (pullmergeRequestId) {
              branch = sourceBranch;
              domain = `${pullmergeRequestId}.${domain}`;
              imageId = `${applicationId}-${pullmergeRequestId}`;
            }

            let deployNeeded = true;
            let destinationType;

            if (destinationDockerId) {
              destinationType = 'docker';
            }
            if (destinationType === 'docker') {
              const docker = dockerInstance({ destinationDocker });
              const host = getEngine(destinationDocker.engine);

              await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
              const { workdir, repodir } = await createDirectories({ repository, buildId });
              const configuration = await setDefaultConfiguration(message);

              buildPack = configuration.buildPack;
              port = configuration.port;
              installCommand = configuration.installCommand;
              startCommand = configuration.startCommand;
              buildCommand = configuration.buildCommand;
              publishDirectory = configuration.publishDirectory;
              baseDirectory = configuration.baseDirectory;
              dockerFileLocation = configuration.dockerFileLocation;
              denoMainFile = configuration.denoMainFile;
              const commit = await importers[gitSource.type]({
                applicationId,
                debug,
                workdir,
                repodir,
                githubAppId: gitSource.githubApp?.id,
                gitlabAppId: gitSource.gitlabApp?.id,
                repository,
                branch,
                buildId,
                apiUrl: gitSource.apiUrl,
                htmlUrl: gitSource.htmlUrl,
                projectId,
                deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
                privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null
              });
              if (!commit) {
                throw new Error('No commit found?');
              }
              let tag = commit.slice(0, 7);
              if (pullmergeRequestId) {
                tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
              }

              try {
                await prisma.build.update({ where: { id: buildId }, data: { commit } });
              } catch (err) {
                console.log(err);
              }
              if (!pullmergeRequestId) {
                const currentHash = crypto
                  //@ts-ignore
                  .createHash('sha256')
                  .update(
                    JSON.stringify({
                      buildPack, port, exposePort, installCommand, buildCommand,
                      startCommand, secrets, branch, repository, fqdn
                    })
                  )
                  .digest('hex');

                if (configHash !== currentHash) {
                  await prisma.application.update({
                    where: { id: applicationId },
                    data: { configHash: currentHash }
                  });
                  deployNeeded = true;
                  if (configHash) {
                    await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
                  }
                } else {
                  deployNeeded = false;
                }
              } else {
                deployNeeded = true;
              }
              const image = await docker.engine.getImage(`${applicationId}:${tag}`);
              let imageFound = false;
              try {
                await image.inspect();
                imageFound = false;
              } catch (error) {
                //
              }
              if (!imageFound || deployNeeded) {
                await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
                if (buildpacks[buildPack])
                  await buildpacks[buildPack]({
                    buildId, applicationId, domain, name, type, pullmergeRequestId, buildPack,
                    repository, branch, projectId, publishDirectory, debug, commit, tag, workdir,
                    docker, port: exposePort ? `${exposePort}:${port}` : port,
                    installCommand, buildCommand, startCommand, baseDirectory, secrets, phpModules,
                    pythonWSGI, pythonModule, pythonVariable, dockerFileLocation, denoMainFile,
                    denoOptions, baseImage, baseBuildImage
                  });
                else {
                  await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
                  throw new Error(`Build pack ${buildPack} not found.`);
                }
              } else {
                await saveBuildLog({ line: 'Nothing changed.', buildId, applicationId });
              }
              try {
                await asyncExecShell(`DOCKER_HOST=${host} docker stop -t 0 ${imageId}`);
                await asyncExecShell(`DOCKER_HOST=${host} docker rm ${imageId}`);
              } catch (error) {
                //
              }
              const envs = [];
              if (secrets.length > 0) {
                secrets.forEach((secret) => {
                  if (pullmergeRequestId) {
                    if (secret.isPRMRSecret) {
                      envs.push(`${secret.name}=${secret.value}`);
                    }
                  } else {
                    if (!secret.isPRMRSecret) {
                      envs.push(`${secret.name}=${secret.value}`);
                    }
                  }
                });
              }
              await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
              const labels = makeLabelForStandaloneApplication({
                applicationId, fqdn, name, type, pullmergeRequestId, buildPack, repository,
                branch, projectId, port: exposePort ? `${exposePort}:${port}` : port,
                commit, installCommand, buildCommand, startCommand, baseDirectory, publishDirectory
              });
              let envFound = false;
              try {
                envFound = !!(await fs.stat(`${workdir}/.env`));
              } catch (error) {
                //
              }
              try {
                await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
                const composeVolumes = volumes.map((volume) => {
                  return {
                    [`${volume.split(':')[0]}`]: {
                      name: volume.split(':')[0]
                    }
                  };
                });
                const composeFile = {
                  version: '3.8',
                  services: {
                    [imageId]: {
                      image: `${applicationId}:${tag}`,
                      container_name: imageId,
                      volumes,
                      env_file: envFound ? [`${workdir}/.env`] : [],
                      networks: [docker.network],
                      labels,
                      depends_on: [],
                      restart: 'always',
                      ...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
                      // logging: {
                      //   driver: 'fluentd',
                      // },
                      deploy: {
                        restart_policy: {
                          condition: 'on-failure',
                          delay: '5s',
                          max_attempts: 3,
                          window: '120s'
                        }
                      }
                    }
                  },
                  networks: {
                    [docker.network]: {
                      external: true
                    }
                  },
                  volumes: Object.assign({}, ...composeVolumes)
                };
                await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
                await asyncExecShell(
                  `DOCKER_HOST=${host} docker compose --project-directory ${workdir} up -d`
                );
                await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
              } catch (error) {
                await saveBuildLog({ line: error, buildId, applicationId });
                await prisma.build.update({
                  where: { id: message.build_id },
                  data: { status: 'failed' }
                });
                throw new Error(error);
              }
              await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
              await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
            }
          } catch (error) {
            await prisma.build.update({
              where: { id: message.build_id },
              data: { status: 'failed' }
            });
            await saveBuildLog({ line: error, buildId, applicationId });
          } finally {
            await prisma.$disconnect();
          }
        });
        await prisma.$disconnect();
      }
    });
  } else process.exit(0);
})();
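The rebuild decision above hashes the build-relevant configuration and compares it with the stored configHash. Reduced to its essentials (editorial sketch; the field list mirrors the job above):

import crypto from 'crypto';

// Hash the fields whose change should force a rebuild.
function computeConfigHash(config: {
  buildPack: string;
  port: number | string;
  exposePort?: number;
  installCommand?: string;
  buildCommand?: string;
  startCommand?: string;
  secrets: unknown[];
  branch: string;
  repository: string;
  fqdn: string;
}): string {
  return crypto.createHash('sha256').update(JSON.stringify(config)).digest('hex');
}

// A deploy is needed whenever the stored hash differs from the freshly computed one.
const deployNeeded = (storedHash: string | null, currentHash: string) => storedHash !== currentHash;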
714  apps/api/src/lib/buildPacks/common.ts  Normal file
@@ -0,0 +1,714 @@
import { asyncExecShell, base64Encode, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { scheduler } from "../scheduler";
import { promises as fs } from 'fs';
import { day } from "../dayjs";
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
const nodeBased = [
  'react', 'preact', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy',
  'node', 'nestjs', 'nuxtjs', 'nextjs'
];

export function setDefaultBaseImage(buildPack: string | null) {
  const nodeVersions = [
    { value: 'node:lts', label: 'node:lts' },
    { value: 'node:18', label: 'node:18' },
    { value: 'node:17', label: 'node:17' },
    { value: 'node:16', label: 'node:16' },
    { value: 'node:14', label: 'node:14' },
    { value: 'node:12', label: 'node:12' }
  ];
  const staticVersions = [
    { value: 'webdevops/nginx:alpine', label: 'webdevops/nginx:alpine' },
    { value: 'webdevops/apache:alpine', label: 'webdevops/apache:alpine' }
  ];
  const rustVersions = [
    { value: 'rust:latest', label: 'rust:latest' },
    { value: 'rust:1.60', label: 'rust:1.60' },
    { value: 'rust:1.60-buster', label: 'rust:1.60-buster' },
    { value: 'rust:1.60-bullseye', label: 'rust:1.60-bullseye' },
    { value: 'rust:1.60-slim-buster', label: 'rust:1.60-slim-buster' },
    { value: 'rust:1.60-slim-bullseye', label: 'rust:1.60-slim-bullseye' },
    { value: 'rust:1.60-alpine3.14', label: 'rust:1.60-alpine3.14' },
    { value: 'rust:1.60-alpine3.15', label: 'rust:1.60-alpine3.15' }
  ];
  const phpVersions = [
    { value: 'webdevops/php-apache:8.0', label: 'webdevops/php-apache:8.0' },
    { value: 'webdevops/php-nginx:8.0', label: 'webdevops/php-nginx:8.0' },
    { value: 'webdevops/php-apache:7.4', label: 'webdevops/php-apache:7.4' },
    { value: 'webdevops/php-nginx:7.4', label: 'webdevops/php-nginx:7.4' },
    { value: 'webdevops/php-apache:7.3', label: 'webdevops/php-apache:7.3' },
    { value: 'webdevops/php-nginx:7.3', label: 'webdevops/php-nginx:7.3' },
    { value: 'webdevops/php-apache:7.2', label: 'webdevops/php-apache:7.2' },
    { value: 'webdevops/php-nginx:7.2', label: 'webdevops/php-nginx:7.2' },
    { value: 'webdevops/php-apache:7.1', label: 'webdevops/php-apache:7.1' },
    { value: 'webdevops/php-nginx:7.1', label: 'webdevops/php-nginx:7.1' },
    { value: 'webdevops/php-apache:7.0', label: 'webdevops/php-apache:7.0' },
    { value: 'webdevops/php-nginx:7.0', label: 'webdevops/php-nginx:7.0' },
    { value: 'webdevops/php-apache:5.6', label: 'webdevops/php-apache:5.6' },
    { value: 'webdevops/php-nginx:5.6', label: 'webdevops/php-nginx:5.6' },
    { value: 'webdevops/php-apache:8.0-alpine', label: 'webdevops/php-apache:8.0-alpine' },
    { value: 'webdevops/php-nginx:8.0-alpine', label: 'webdevops/php-nginx:8.0-alpine' },
    { value: 'webdevops/php-apache:7.4-alpine', label: 'webdevops/php-apache:7.4-alpine' },
    { value: 'webdevops/php-nginx:7.4-alpine', label: 'webdevops/php-nginx:7.4-alpine' },
    { value: 'webdevops/php-apache:7.3-alpine', label: 'webdevops/php-apache:7.3-alpine' },
    { value: 'webdevops/php-nginx:7.3-alpine', label: 'webdevops/php-nginx:7.3-alpine' },
    { value: 'webdevops/php-apache:7.2-alpine', label: 'webdevops/php-apache:7.2-alpine' },
    { value: 'webdevops/php-nginx:7.2-alpine', label: 'webdevops/php-nginx:7.2-alpine' },
    { value: 'webdevops/php-apache:7.1-alpine', label: 'webdevops/php-apache:7.1-alpine' },
    { value: 'webdevops/php-nginx:7.1-alpine', label: 'webdevops/php-nginx:7.1-alpine' }
  ];
  const pythonVersions = [
    { value: 'python:3.10-alpine', label: 'python:3.10-alpine' },
    { value: 'python:3.10-buster', label: 'python:3.10-buster' },
    { value: 'python:3.10-bullseye', label: 'python:3.10-bullseye' },
    { value: 'python:3.10-slim-bullseye', label: 'python:3.10-slim-bullseye' },
    { value: 'python:3.9-alpine', label: 'python:3.9-alpine' },
    { value: 'python:3.9-buster', label: 'python:3.9-buster' },
    { value: 'python:3.9-bullseye', label: 'python:3.9-bullseye' },
    { value: 'python:3.9-slim-bullseye', label: 'python:3.9-slim-bullseye' },
    { value: 'python:3.8-alpine', label: 'python:3.8-alpine' },
    { value: 'python:3.8-buster', label: 'python:3.8-buster' },
    { value: 'python:3.8-bullseye', label: 'python:3.8-bullseye' },
    { value: 'python:3.8-slim-bullseye', label: 'python:3.8-slim-bullseye' },
    { value: 'python:3.7-alpine', label: 'python:3.7-alpine' },
    { value: 'python:3.7-buster', label: 'python:3.7-buster' },
    { value: 'python:3.7-bullseye', label: 'python:3.7-bullseye' },
    { value: 'python:3.7-slim-bullseye', label: 'python:3.7-slim-bullseye' }
  ];
  let payload: any = {
    baseImage: null,
    baseBuildImage: null,
    baseImages: [],
    baseBuildImages: []
  };
  if (nodeBased.includes(buildPack)) {
    payload.baseImage = 'node:lts';
    payload.baseImages = nodeVersions;
    payload.baseBuildImage = 'node:lts';
    payload.baseBuildImages = nodeVersions;
  }
  if (staticApps.includes(buildPack)) {
    payload.baseImage = 'webdevops/nginx:alpine';
    payload.baseImages = staticVersions;
    payload.baseBuildImage = 'node:lts';
    payload.baseBuildImages = nodeVersions;
  }
  if (buildPack === 'python') {
    payload.baseImage = 'python:3.10-alpine';
    payload.baseImages = pythonVersions;
  }
  if (buildPack === 'rust') {
    payload.baseImage = 'rust:latest';
    payload.baseBuildImage = 'rust:latest';
    payload.baseImages = rustVersions;
    payload.baseBuildImages = rustVersions;
  }
  if (buildPack === 'deno') {
    payload.baseImage = 'denoland/deno:latest';
  }
  if (buildPack === 'php') {
    payload.baseImage = 'webdevops/php-apache:8.0-alpine';
    payload.baseImages = phpVersions;
  }
  if (buildPack === 'laravel') {
    payload.baseImage = 'webdevops/php-apache:8.0-alpine';
    payload.baseBuildImage = 'node:18';
    payload.baseBuildImages = nodeVersions;
  }
  return payload;
}

export const setDefaultConfiguration = async (data: any) => {
  let {
    buildPack, port, installCommand, startCommand, buildCommand,
    publishDirectory, baseDirectory, dockerFileLocation, denoMainFile
  } = data;
  //@ts-ignore
  const template = scanningTemplates[buildPack];
  if (!port) {
    port = template?.port || 3000;

    if (buildPack === 'static') port = 80;
    else if (buildPack === 'node') port = 3000;
    else if (buildPack === 'php') port = 80;
    else if (buildPack === 'python') port = 8000;
  }
  if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
    installCommand = template?.installCommand || 'yarn install';
  if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
    startCommand = template?.startCommand || 'yarn start';
  if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
    buildCommand = template?.buildCommand || null;
  if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
  if (baseDirectory) {
    if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
    if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
  }
  if (dockerFileLocation) {
    if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
    if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
  } else {
    dockerFileLocation = '/Dockerfile';
  }
  if (!denoMainFile) {
    denoMainFile = 'main.ts';
  }

  return {
    buildPack, port, installCommand, startCommand, buildCommand,
    publishDirectory, baseDirectory, dockerFileLocation, denoMainFile
  };
};

export const scanningTemplates = {
  '@sveltejs/kit': { buildPack: 'nodejs' },
  astro: { buildPack: 'astro' },
  '@11ty/eleventy': { buildPack: 'eleventy' },
  svelte: { buildPack: 'svelte' },
  '@nestjs/core': { buildPack: 'nestjs' },
  next: { buildPack: 'nextjs' },
  nuxt: { buildPack: 'nuxtjs' },
  'react-scripts': { buildPack: 'react' },
  'parcel-bundler': { buildPack: 'static' },
  '@vue/cli-service': { buildPack: 'vuejs' },
  vuejs: { buildPack: 'vuejs' },
  gatsby: { buildPack: 'gatsby' },
  'preact-cli': { buildPack: 'react' }
};

export const saveBuildLog = async ({
  line,
  buildId,
  applicationId
}: {
  line: string;
  buildId: string;
  applicationId: string;
}): Promise<any> => {
  if (line && typeof line === 'string' && line.includes('ghs_')) {
    const regex = /ghs_.*@/g;
    line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
  }
  const addTimestamp = `[${generateTimestamp()}] ${line}`;
  if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
  return await prisma.buildLog.create({
    data: {
      line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
    }
  });
};

export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage) {
  try {
    if (buildPack === 'php') {
      await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
      await saveBuildLog({
        line: 'Copied default configuration file for PHP.',
        buildId,
        applicationId
      });
    } else if (staticApps.includes(buildPack) && baseImage.includes('nginx')) {
      await fs.writeFile(
        `${workdir}/nginx.conf`,
        `user nginx;
worker_processes auto;

error_log /docker.stdout;
pid /run/nginx.pid;

events {
  worker_connections 1024;
}

http {
  log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                  '$status $body_bytes_sent "$http_referer" '
                  '"$http_user_agent" "$http_x_forwarded_for"';

  access_log /docker.stdout main;

  sendfile on;
  tcp_nopush on;
  tcp_nodelay on;
  keepalive_timeout 65;
  types_hash_max_size 2048;

  include /etc/nginx/mime.types;
  default_type application/octet-stream;

  server {
    listen 80;
    server_name localhost;

    location / {
      root /app;
      index index.html;
      try_files $uri $uri/index.html $uri/ /index.html =404;
    }

    error_page 404 /50x.html;

    # redirect server error pages to the static page /50x.html
    #
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
      root /app;
    }
  }
}
`
      );
    }
  } catch (error) {
    console.log(error);
    throw new Error(error);
  }
}

export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
  return (
    installCommand?.includes('pnpm') ||
    buildCommand?.includes('pnpm') ||
    startCommand?.includes('pnpm')
  );
}

export async function buildImage({
  applicationId,
  tag,
  workdir,
  docker,
  buildId,
  isCache = false,
  debug = false,
  dockerFileLocation = '/Dockerfile'
}) {
  if (isCache) {
    await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
  } else {
    await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
  }
  if (!debug && isCache) {
    await saveBuildLog({
      line: `Debug turned off. To see more details, allow it in the configuration.`,
      buildId,
      applicationId
    });
  }

  const stream = await docker.engine.buildImage(
    { src: ['.'], context: workdir },
    {
      dockerfile: isCache ? `${dockerFileLocation}-cache` : dockerFileLocation,
      t: `${applicationId}:${tag}${isCache ? '-cache' : ''}`
    }
  );
  await streamEvents({ stream, docker, buildId, applicationId, debug });
  await saveBuildLog({ line: `Building image successful!`, buildId, applicationId });
}

export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
  await new Promise((resolve, reject) => {
    docker.engine.modem.followProgress(stream, onFinished, onProgress);
    function onFinished(err, res) {
      if (err) reject(err);
      resolve(res);
    }
    async function onProgress(event) {
      if (event.error) {
        reject(event.error);
      } else if (event.stream) {
        if (event.stream !== '\n') {
          if (debug)
            await saveBuildLog({
              line: `${event.stream.replace('\n', '')}`,
              buildId,
              applicationId
            });
        }
      }
    }
  });
}

export function makeLabelForStandaloneApplication({
  applicationId, fqdn, name, type, pullmergeRequestId = null, buildPack, repository, branch,
  projectId, port, commit, installCommand, buildCommand, startCommand, baseDirectory, publishDirectory
}) {
  if (pullmergeRequestId) {
    const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
    const domain = getDomain(fqdn);
    fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
  }
  return [
    'coolify.managed=true',
    `coolify.version=${version}`,
    `coolify.type=standalone-application`,
    `coolify.configuration=${base64Encode(
      JSON.stringify({
        applicationId, fqdn, name, type, pullmergeRequestId, buildPack, repository, branch,
        projectId, port, commit, installCommand, buildCommand, startCommand, baseDirectory, publishDirectory
      })
    )}`
  ];
}

export async function buildCacheImageWithNode(data, imageForBuild) {
  const {
    applicationId, tag, workdir, docker, buildId, baseDirectory,
    installCommand, buildCommand, debug, secrets, pullmergeRequestId
  } = data;
  const isPnpm = checkPnpm(installCommand, buildCommand);
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${imageForBuild}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    secrets.forEach((secret) => {
      if (secret.isBuildSecret) {
        if (pullmergeRequestId) {
          if (secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        } else {
          if (!secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        }
      }
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  if (installCommand) {
    Dockerfile.push(`COPY .${baseDirectory || ''}/package.json ./`);
    Dockerfile.push(`RUN ${installCommand}`);
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`RUN ${buildCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}

export async function buildCacheImageForLaravel(data, imageForBuild) {
  const { applicationId, tag, workdir, docker, buildId, debug, secrets, pullmergeRequestId } = data;
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${imageForBuild}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    secrets.forEach((secret) => {
      if (secret.isBuildSecret) {
        if (pullmergeRequestId) {
          if (secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        } else {
          if (!secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        }
      }
    });
  }
  Dockerfile.push(`COPY *.json *.mix.js /app/`);
  Dockerfile.push(`COPY resources /app/resources`);
  Dockerfile.push(`RUN yarn install && yarn production`);
  await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}

export async function buildCacheImageWithCargo(data, imageForBuild) {
  const {
    applicationId, tag, workdir, docker, buildId, baseDirectory,
    installCommand, buildCommand, debug, secrets
  } = data;
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push('RUN cargo install cargo-chef');
  Dockerfile.push('COPY . .');
  Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json');
  Dockerfile.push(`FROM ${imageForBuild}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push('RUN cargo install cargo-chef');
  Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
  Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
  await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}
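scanningTemplates maps a well-known package.json dependency to a build pack. A sketch of how such a lookup could be driven from a project's package.json (editorial; the repository's actual detection code is not part of this hunk):

import { promises as fs } from 'fs';
import { scanningTemplates } from './common';

// Return the build pack for the first recognised dependency, defaulting to 'node'.
async function detectBuildPack(workdir: string): Promise<string> {
  const packageJson = JSON.parse(await fs.readFile(`${workdir}/package.json`, 'utf8'));
  const dependencies = { ...packageJson.dependencies, ...packageJson.devDependencies };
  for (const name of Object.keys(dependencies)) {
    if (scanningTemplates[name]) return scanningTemplates[name].buildPack;
  }
  return 'node';
}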
62  apps/api/src/lib/buildPacks/deno.ts  Normal file
@@ -0,0 +1,62 @@
import { promises as fs } from 'fs';
import { buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    workdir, port, baseDirectory, secrets, pullmergeRequestId,
    denoMainFile, denoOptions, buildId
  } = data;
  const Dockerfile: Array<string> = [];

  let depsFound = false;
  try {
    await fs.readFile(`${workdir}${baseDirectory || ''}/deps.ts`);
    depsFound = true;
  } catch (error) {}

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    secrets.forEach((secret) => {
      if (secret.isBuildSecret) {
        if (pullmergeRequestId) {
          if (secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        } else {
          if (!secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        }
      }
    });
  }
  if (depsFound) {
    Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
    Dockerfile.push(`RUN deno cache deps.ts`);
  }
  Dockerfile.push(`COPY ${denoMainFile} /app`);
  Dockerfile.push(`RUN deno cache ${denoMainFile}`);
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`ENV NO_COLOR true`);
  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
52  apps/api/src/lib/buildPacks/docker.ts  Normal file
@@ -0,0 +1,52 @@
import { promises as fs } from 'fs';
import { buildImage } from './common';

export default async function ({
  applicationId, debug, tag, workdir, docker, buildId,
  baseDirectory, secrets, pullmergeRequestId, dockerFileLocation
}) {
  try {
    const file = `${workdir}${dockerFileLocation}`;
    let dockerFileOut = `${workdir}`;
    if (baseDirectory) {
      dockerFileOut = `${workdir}${baseDirectory}`;
      workdir = `${workdir}${baseDirectory}`;
    }
    const Dockerfile: Array<string> = (await fs.readFile(`${file}`, 'utf8'))
      .toString()
      .trim()
      .split('\n');
    Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
    if (secrets.length > 0) {
      secrets.forEach((secret) => {
        if (secret.isBuildSecret) {
          if (
            (pullmergeRequestId && secret.isPRMRSecret) ||
            (!pullmergeRequestId && !secret.isPRMRSecret)
          ) {
            Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);

            Dockerfile.forEach((line, index) => {
              if (line.startsWith('FROM')) {
                Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
              }
            });
          }
        }
      });
    }

    await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n'));
    await buildImage({ applicationId, tag, workdir, docker, buildId, debug, dockerFileLocation });
  } catch (error) {
    throw error;
  }
}
28  apps/api/src/lib/buildPacks/gatsby.ts  Normal file
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, imageforBuild): Promise<void> => {
  const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
  const Dockerfile: Array<string> = [];

  Dockerfile.push(`FROM ${imageforBuild}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
  if (baseImage.includes('nginx')) {
    Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await buildCacheImageWithNode(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
apps/api/src/lib/buildPacks/index.ts
@@ -1,29 +1,37 @@
-import vuejs from './vuejs';
-import svelte from './svelte';
-import Static from './static';
-import rust from './rust';
-import react from './react';
-import php from './php';
-import nuxtjs from './nuxtjs';
-import nodejs from './nodejs';
-import nextjs from './nextjs';
-import nestjs from './nestjs';
-import gatsby from './gatsby';
+import node from './node';
+import staticApp from './static';
 import docker from './docker';
+import gatsby from './gatsby';
+import svelte from './svelte';
+import react from './react';
+import nestjs from './nestjs';
+import nextjs from './nextjs';
+import nuxtjs from './nuxtjs';
+import vuejs from './vuejs';
+import php from './php';
+import rust from './rust';
+import astro from './static';
+import eleventy from './static';
 import python from './python';
+import deno from './deno';
+import laravel from './laravel';
 
 export {
-  vuejs,
-  svelte,
-  Static as static,
-  rust,
-  react,
-  php,
-  nuxtjs,
-  nodejs,
-  nextjs,
-  nestjs,
-  gatsby,
+  node,
+  staticApp as static,
   docker,
-  python
+  gatsby,
+  svelte,
+  react,
+  nestjs,
+  nextjs,
+  nuxtjs,
+  vuejs,
+  php,
+  rust,
+  astro,
+  eleventy,
+  python,
+  deno,
+  laravel
 };
40  apps/api/src/lib/buildPacks/laravel.ts  Normal file
@@ -0,0 +1,40 @@
import { promises as fs } from 'fs';
import { buildCacheImageForLaravel, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const { workdir, applicationId, tag, buildId, port } = data;
  const Dockerfile: Array<string> = [];

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
  Dockerfile.push(`COPY --chown=application:application composer.* ./`);
  Dockerfile.push(`COPY --chown=application:application database/ database/`);
  Dockerfile.push(
    `RUN composer install --ignore-platform-reqs --no-interaction --no-plugins --no-scripts --prefer-dist`
  );
  Dockerfile.push(
    `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/js/ /app/public/js/`
  );
  Dockerfile.push(
    `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/css/ /app/public/css/`
  );
  Dockerfile.push(
    `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
  );
  Dockerfile.push(`COPY --chown=application:application . ./`);
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  const { baseImage, baseBuildImage } = data;
  try {
    await buildCacheImageForLaravel(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
31  apps/api/src/lib/buildPacks/nestjs.ts  Normal file
@@ -0,0 +1,31 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = startCommand.includes('pnpm');

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);

  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD ${startCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await buildCacheImageWithNode(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
58  apps/api/src/lib/buildPacks/nextjs.ts  Normal file
@@ -0,0 +1,58 @@
import { promises as fs } from 'fs';
import { buildImage, checkPnpm } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    buildId, workdir, port, installCommand, buildCommand, startCommand,
    baseDirectory, secrets, pullmergeRequestId
  } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    secrets.forEach((secret) => {
      if (secret.isBuildSecret) {
        if (pullmergeRequestId) {
          if (secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        } else {
          if (!secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        }
      }
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`RUN ${installCommand}`);

  if (buildCommand) {
    Dockerfile.push(`RUN ${buildCommand}`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD ${startCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
58  apps/api/src/lib/buildPacks/node.ts  Normal file
@@ -0,0 +1,58 @@
import { promises as fs } from 'fs';
import { buildImage, checkPnpm } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    workdir, port, installCommand, buildCommand, startCommand,
    baseDirectory, secrets, pullmergeRequestId, buildId
  } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    secrets.forEach((secret) => {
      if (secret.isBuildSecret) {
        if (pullmergeRequestId) {
          if (secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        } else {
          if (!secret.isPRMRSecret) {
            Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
          }
        }
      }
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`RUN ${installCommand}`);
  if (buildCommand) {
    Dockerfile.push(`RUN ${buildCommand}`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD ${startCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage } = data;
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
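To make the node build pack concrete: for a yarn project with no build secrets, no pnpm, a configured `yarn build` step, and the default port 3000, the file written to ${workdir}/Dockerfile comes out roughly as below. This is an editorial illustration of the code path above; node:lts and the build id are placeholder values.

// Illustrative output of node.ts for a plain yarn app (placeholder image and build id).
const exampleDockerfile = [
  'FROM node:lts',
  'WORKDIR /app',
  'LABEL coolify.buildId=<buildId>',
  'COPY . ./',
  'RUN yarn install',
  'RUN yarn build',
  'EXPOSE 3000',
  'CMD yarn start'
].join('\n');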
apps/api/src/lib/buildPacks/nuxtjs.ts (new file)
@@ -0,0 +1,57 @@
import { promises as fs } from 'fs';
import { buildImage, checkPnpm } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const {
		workdir,
		port,
		installCommand,
		buildCommand,
		startCommand,
		baseDirectory,
		secrets,
		pullmergeRequestId,
		buildId
	} = data;
	const Dockerfile: Array<string> = [];
	const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
			if (secret.isBuildSecret) {
				if (pullmergeRequestId) {
					if (secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {
					if (!secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				}
			}
		});
	}
	if (isPnpm) {
		Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
	}
	Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
	Dockerfile.push(`RUN ${installCommand}`);
	if (buildCommand) {
		Dockerfile.push(`RUN ${buildCommand}`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	Dockerfile.push(`CMD ${startCommand}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/php.ts (new file)
@@ -0,0 +1,59 @@
import { promises as fs } from 'fs';
import { buildImage } from './common';

const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
	const { workdir, baseDirectory, buildId, port, secrets, pullmergeRequestId } = data;
	const Dockerfile: Array<string> = [];
	let composerFound = false;
	try {
		await fs.readFile(`${workdir}${baseDirectory || ''}/composer.json`);
		composerFound = true;
	} catch (error) {}

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
			if (secret.isBuildSecret) {
				if (pullmergeRequestId) {
					if (secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {
					if (!secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				}
			}
		});
	}
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
	if (htaccessFound) {
		Dockerfile.push(`COPY .${baseDirectory || ''}/.htaccess ./`);
	}
	if (composerFound) {
		Dockerfile.push(`RUN composer install`);
	}

	Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	const { workdir, baseDirectory, baseImage } = data;
	try {
		let htaccessFound = false;
		try {
			await fs.readFile(`${workdir}${baseDirectory || ''}/.htaccess`);
			htaccessFound = true;
		} catch (e) {
			//
		}
		await createDockerfile(data, baseImage, htaccessFound);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/python.ts (new file)
@@ -0,0 +1,76 @@
import { promises as fs } from 'fs';
import { buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const {
		workdir,
		port,
		baseDirectory,
		secrets,
		pullmergeRequestId,
		pythonWSGI,
		pythonModule,
		pythonVariable,
		buildId
	} = data;
	const Dockerfile: Array<string> = [];
	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
			if (secret.isBuildSecret) {
				if (pullmergeRequestId) {
					if (secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {
					if (!secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				}
			}
		});
	}
	if (pythonWSGI?.toLowerCase() === 'gunicorn') {
		Dockerfile.push(`RUN pip install gunicorn`);
	} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
		Dockerfile.push(`RUN pip install uvicorn`);
	} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
		Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
		// Dockerfile.push(`RUN pip install --no-cache-dir uwsgi`)
	}

	try {
		await fs.stat(`${workdir}${baseDirectory || ''}/requirements.txt`);
		Dockerfile.push(`COPY .${baseDirectory || ''}/requirements.txt ./`);
		Dockerfile.push(`RUN pip install --no-cache-dir -r .${baseDirectory || ''}/requirements.txt`);
	} catch (e) {
		//
	}
	Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
	Dockerfile.push(`EXPOSE ${port}`);
	if (pythonWSGI?.toLowerCase() === 'gunicorn') {
		Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
	} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
		Dockerfile.push(`CMD uvicorn ${pythonModule}:${pythonVariable} --port ${port} --host 0.0.0.0`);
	} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
		Dockerfile.push(
			`CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`
		);
	} else {
		Dockerfile.push(`CMD python ${pythonModule}`);
	}

	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/react.ts (new file)
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	if (baseImage.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/rust.ts (new file)
@@ -0,0 +1,40 @@
import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
import { asyncExecShell } from '../common';
import { buildCacheImageWithCargo, buildImage } from './common';

const createDockerfile = async (data, image, name): Promise<void> => {
	const { workdir, port, applicationId, tag, buildId } = data;
	const Dockerfile: Array<string> = [];
	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target target`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /usr/local/cargo /usr/local/cargo`);
	Dockerfile.push(`COPY . .`);
	Dockerfile.push(`RUN cargo build --release --bin ${name}`);
	Dockerfile.push('FROM debian:buster-slim');
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(
		`RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*`
	);
	Dockerfile.push(`RUN update-ca-certificates`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
	Dockerfile.push(`EXPOSE ${port}`);
	Dockerfile.push(`CMD ["/app/${name}"]`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { workdir, baseImage, baseBuildImage } = data;
		const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
		const parsedToml: any = TOML.parse(cargoToml);
		const name = parsedToml.package.name;
		await buildCacheImageWithCargo(data, baseBuildImage);
		await createDockerfile(data, baseImage, name);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/static.ts (new file)
@@ -0,0 +1,59 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const {
		applicationId,
		tag,
		workdir,
		buildCommand,
		baseDirectory,
		publishDirectory,
		secrets,
		pullmergeRequestId,
		baseImage,
		buildId,
		port
	} = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
			if (secret.isBuildSecret) {
				if (pullmergeRequestId) {
					if (secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {
					if (!secret.isPRMRSecret) {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				}
			}
		});
	}
	if (buildCommand) {
		Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	} else {
		Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
	}
	if (baseImage.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		if (data.buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/svelte.ts (new file)
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	if (baseImage.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/buildPacks/vuejs.ts (new file)
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
	const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
	if (baseImage.includes('nginx')) {
		Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
	}
	Dockerfile.push(`EXPOSE ${port}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
	try {
		const { baseImage, baseBuildImage } = data;
		await buildCacheImageWithNode(data, baseBuildImage);
		await createDockerfile(data, baseImage);
		await buildImage(data);
	} catch (error) {
		throw error;
	}
}
apps/api/src/lib/common.ts (new file, 1480 lines; diff not shown because it is too large)
apps/api/src/lib/dayjs.ts (new file)
@@ -0,0 +1,7 @@
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import relativeTime from 'dayjs/plugin/relativeTime.js';
dayjs.extend(utc);
dayjs.extend(relativeTime);

export { dayjs as day };
apps/api/src/lib/docker.ts (new file)
@@ -0,0 +1,78 @@
import { asyncExecShell } from './common';
import Dockerode from 'dockerode';
export function getEngine(engine: string): string {
	return engine === '/var/run/docker.sock' ? 'unix:///var/run/docker.sock' : engine;
}
export function dockerInstance({ destinationDocker }): { engine: Dockerode; network: string } {
	return {
		engine: new Dockerode({
			socketPath: destinationDocker.engine
		}),
		network: destinationDocker.network
	};
}

export async function checkContainer(engine: string, container: string, remove = false): Promise<boolean> {
	const host = getEngine(engine);
	let containerFound = false;

	try {
		const { stdout } = await asyncExecShell(
			`DOCKER_HOST="${host}" docker inspect --format '{{json .State}}' ${container}`
		);
		const parsedStdout = JSON.parse(stdout);
		const status = parsedStdout.Status;
		const isRunning = status === 'running';
		if (status === 'created') {
			await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
		}
		if (remove && status === 'exited') {
			await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
		}
		if (isRunning) {
			containerFound = true;
		}
	} catch (err) {
		// Container not found
	}
	return containerFound;
}

export async function isContainerExited(engine: string, containerName: string): Promise<boolean> {
	let isExited = false;
	const host = getEngine(engine);
	try {
		const { stdout } = await asyncExecShell(
			`DOCKER_HOST="${host}" docker inspect -f '{{.State.Status}}' ${containerName}`
		);
		if (stdout.trim() === 'exited') {
			isExited = true;
		}
	} catch (error) {
		//
	}

	return isExited;
}

export async function removeContainer({
	id,
	engine
}: {
	id: string;
	engine: string;
}): Promise<void> {
	const host = getEngine(engine);
	try {
		const { stdout } = await asyncExecShell(
			`DOCKER_HOST=${host} docker inspect --format '{{json .State}}' ${id}`
		);
		if (JSON.parse(stdout).Running) {
			await asyncExecShell(`DOCKER_HOST=${host} docker stop -t 0 ${id}`);
			await asyncExecShell(`DOCKER_HOST=${host} docker rm ${id}`);
		}
	} catch (error) {
		console.log(error);
		throw error;
	}
}
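A short usage sketch of the helpers above, assuming a destination that points at the local Docker socket; the container id is hypothetical.

import { checkContainer, getEngine, removeContainer } from './docker';

const engine = '/var/run/docker.sock';
// getEngine() maps the socket path to the DOCKER_HOST form used by the shell commands above.
console.log(getEngine(engine)); // 'unix:///var/run/docker.sock'

// checkContainer resolves to true only for a running container; containers left in the
// 'created' state are removed, and 'exited' ones are removed too when remove = true.
const isRunning = await checkContainer(engine, 'example-application-id', true);
if (isRunning) {
	await removeContainer({ id: 'example-application-id', engine });
}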
apps/api/src/lib/importers/github.ts (new file)
@@ -0,0 +1,61 @@
import jsonwebtoken from 'jsonwebtoken';
import { saveBuildLog } from '../buildPacks/common';
import { asyncExecShell, decrypt, prisma } from '../common';

export default async function ({
	applicationId,
	workdir,
	githubAppId,
	repository,
	apiUrl,
	htmlUrl,
	branch,
	buildId
}: {
	applicationId: string;
	workdir: string;
	githubAppId: string;
	repository: string;
	apiUrl: string;
	htmlUrl: string;
	branch: string;
	buildId: string;
}): Promise<string> {
	const { default: got } = await import('got')
	const url = htmlUrl.replace('https://', '').replace('http://', '');
	await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });

	const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
	if (body.privateKey) body.privateKey = decrypt(body.privateKey);
	const { privateKey, appId, installationId } = body

	const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');

	const payload = {
		iat: Math.round(new Date().getTime() / 1000),
		exp: Math.round(new Date().getTime() / 1000 + 60),
		iss: appId
	};
	const jwtToken = jsonwebtoken.sign(payload, githubPrivateKey, {
		algorithm: 'RS256'
	});
	const { token } = await got
		.post(`${apiUrl}/app/installations/${installationId}/access_tokens`, {
			headers: {
				Authorization: `Bearer ${jwtToken}`,
				Accept: 'application/vnd.github.machine-man-preview+json'
			}
		})
		.json();
	await saveBuildLog({
		line: `Cloning ${repository}:${branch} branch.`,
		buildId,
		applicationId
	});
	await asyncExecShell(
		`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
	);
	const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
	return commit.replace('\n', '');
}
apps/api/src/lib/importers/gitlab.ts (new file)
@@ -0,0 +1,39 @@
import { saveBuildLog } from "../buildPacks/common";
import { asyncExecShell } from "../common";

export default async function ({
	applicationId,
	workdir,
	repodir,
	htmlUrl,
	repository,
	branch,
	buildId,
	privateSshKey
}: {
	applicationId: string;
	workdir: string;
	repository: string;
	htmlUrl: string;
	branch: string;
	buildId: string;
	repodir: string;
	privateSshKey: string;
}): Promise<string> {
	const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
	await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
	await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
	await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);

	await saveBuildLog({
		line: `Cloning ${repository}:${branch} branch.`,
		buildId,
		applicationId
	});

	await asyncExecShell(
		`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
	);
	const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
	return commit.replace('\n', '');
}
apps/api/src/lib/importers/index.ts (new file)
@@ -0,0 +1,4 @@
import github from './github';
import gitlab from './gitlab';

export { github, gitlab };
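Both importers clone the repository into a working directory and resolve with the HEAD commit SHA. A sketch of how a caller might dispatch between them by git source type; the option object below is an assumption, the real deploy job builds these arguments from the database.

import { github, gitlab } from './importers';

async function importRepository(opts: {
	type: 'github' | 'gitlab';
	applicationId: string;
	workdir: string;
	repodir: string;
	repository: string;
	branch: string;
	buildId: string;
	apiUrl: string;
	htmlUrl: string;
	githubAppId: string;
	privateSshKey: string;
}): Promise<string> {
	// Returns the cloned commit SHA in both cases.
	const { type, ...args } = opts;
	return type === 'github' ? github(args) : gitlab(args);
}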
apps/api/src/lib/scheduler.ts (new file)
@@ -0,0 +1,44 @@
import Bree from 'bree';
import path from 'path';
import Cabin from 'cabin';
import TSBree from '@breejs/ts-worker';
import { isDev } from './common';

Bree.extend(TSBree);

const options: any = {
	defaultExtension: 'js',
	logger: false,
	workerMessageHandler: async ({ name, message }) => {
		if (name === 'deployApplication') {
			if (message.pending === 0) {
				if (!scheduler.workers.has('autoUpdater')) {
					await scheduler.stop('deployApplication');
					await scheduler.run('autoUpdater')
				}
			}
		}
	},
	jobs: [
		{
			name: 'deployApplication'
		},
		{
			name: 'cleanupStorage',
			interval: '10m'
		},
		{
			name: 'checkProxies',
			interval: '10s'
		},
		{
			name: 'autoUpdater',
		}
	],
};
if (isDev) options.root = path.join(__dirname, '../jobs');

export const scheduler = new Bree(options);
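A sketch of how other parts of the API hand work to these Bree jobs, mirroring the pattern used later in the application handlers; the payload fields are abbreviated and illustrative.

import { scheduler } from './scheduler';

// Make sure the long-running deploy worker is up, then hand it a queued build.
if (!scheduler.workers.has('deployApplication')) {
	await scheduler.run('deployApplication');
}
scheduler.workers.get('deployApplication').postMessage({
	build_id: 'example-build-id',
	type: 'manual'
});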
apps/api/src/lib/serviceFields.ts (new file)
@@ -0,0 +1,407 @@
// Example:
// export const nocodb = [
// 	{ name: 'postgreslUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
// ]

export const plausibleAnalytics = [
	{ name: 'email', isEditable: true, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'username', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'password', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlDatabase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPublicPort', isEditable: false, isLowerCase: false, isNumber: true, isBoolean: false, isEncrypted: false },
	{ name: 'secretKeyBase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'scriptName', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]
export const minio = [
	{ name: 'apiFqdn', isEditable: true, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'rootUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'rootUserPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true }
]
export const vscodeserver = [
	{ name: 'password', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true }
]
export const wordpress = [
	{ name: 'extraConfig', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mysqlHost', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mysqlPort', isEditable: true, isLowerCase: false, isNumber: true, isBoolean: false, isEncrypted: false },
	{ name: 'mysqlUser', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mysqlPassword', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mysqlRootUser', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mysqlRootUserPassword', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mysqlDatabase', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]
export const ghost = [
	{ name: 'defaultEmail', isEditable: false, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'defaultPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mariadbUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mariadbPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mariadbRootUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mariadbRootUserPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mariadbDatabase', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]
export const meiliSearch = [
	{ name: 'masterKey', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true }
]
export const umami = [
	{ name: 'postgresqlUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlDatabase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'umamiAdminPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'hashSalt', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true }
]
export const hasura = [
	{ name: 'postgresqlUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlDatabase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'graphQLAdminPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true }
]
export const fider = [
	{ name: 'jwtSecret', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgreslUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'emailNoreply', isEditable: true, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailSmtpHost', isEditable: true, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailSmtpPassword', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'emailSmtpPort', isEditable: true, isLowerCase: false, isNumber: true, isBoolean: false, isEncrypted: false },
	{ name: 'emailSmtpUser', isEditable: true, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailSmtpEnableStartTls', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: true, isEncrypted: false },
	{ name: 'emailMailgunApiKey', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'emailMailgunDomain', isEditable: true, isLowerCase: true, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailMailgunRegion', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]
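Each descriptor tells the API how a stored service field should be treated: whether the UI may edit it, whether the value must be lower-cased or coerced to a number or boolean, and whether it is persisted encrypted. A sketch of how such descriptors might be consumed when saving user input; the loop itself is an assumption, only encrypt (from './common') comes from the codebase.

import { encrypt } from './common';
import * as serviceFields from './serviceFields';

// Hypothetical normalisation step before writing service settings to the database.
function normaliseServiceSettings(type: keyof typeof serviceFields, input: Record<string, any>) {
	const out: Record<string, any> = {};
	for (const field of serviceFields[type]) {
		let value = input[field.name];
		if (value === undefined) continue;
		if (field.isLowerCase && typeof value === 'string') value = value.toLowerCase();
		if (field.isNumber) value = Number(value);
		if (field.isBoolean) value = Boolean(value);
		if (field.isEncrypted) value = encrypt(value);
		out[field.name] = value;
	}
	return out;
}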
apps/api/src/plugins/jwt.ts (new file)
@@ -0,0 +1,34 @@
import fp from 'fastify-plugin'
import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt'

declare module "@fastify/jwt" {
	interface FastifyJWT {
		user: {
			userId: string,
			teamId: string,
			permission: string,
			isAdmin: boolean
		}
	}
}

export default fp<FastifyJWTOptions>(async (fastify, opts) => {
	fastify.register(fastifyJwt, {
		secret: fastify.config.COOLIFY_SECRET_KEY
	})

	fastify.decorate("authenticate", async function (request, reply) {
		try {
			await request.jwtVerify()
		} catch (err) {
			console.log(err)
			reply.send(err)
		}
	})
})

declare module 'fastify' {
	export interface FastifyInstance {
		authenticate(): Promise<void>
	}
}
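With this plugin registered, routes opt in to JWT protection through the `authenticate` decorator. A minimal route sketch, assuming the plugin above is registered on the same Fastify instance; the route path is hypothetical.

import { FastifyPluginAsync } from 'fastify';

const protectedRoutes: FastifyPluginAsync = async (fastify) => {
	fastify.get('/me', { onRequest: [fastify.authenticate] }, async (request) => {
		// request.jwtVerify() has populated request.user with { userId, teamId, permission, isAdmin }.
		return { teamId: request.user.teamId, isAdmin: request.user.isAdmin };
	});
};

export default protectedRoutes;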
apps/api/src/routes/api/v1/applications/handlers.ts (new file)
@@ -0,0 +1,871 @@
import cuid from 'cuid';
|
||||
import crypto from 'node:crypto'
|
||||
import jsonwebtoken from 'jsonwebtoken';
|
||||
import axios from 'axios';
|
||||
import { day } from '../../../../lib/dayjs';
|
||||
|
||||
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { FastifyReply } from 'fastify';
|
||||
|
||||
import { CheckDNS, DeleteApplication, DeployApplication, GetApplication, SaveApplication, SaveApplicationSettings } from '.';
|
||||
import { setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
|
||||
import { asyncExecShell, checkDomainsIsValidInDNS, checkDoubleBranch, decrypt, encrypt, errorHandler, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, prisma, stopBuild, uniqueName } from '../../../../lib/common';
|
||||
import { checkContainer, dockerInstance, getEngine, isContainerExited, removeContainer } from '../../../../lib/docker';
|
||||
import { scheduler } from '../../../../lib/scheduler';
|
||||
|
||||
|
||||
export async function listApplications(request: FastifyRequest) {
|
||||
try {
|
||||
const { teamId } = request.user
|
||||
const applications = await prisma.application.findMany({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { teams: true }
|
||||
});
|
||||
const settings = await prisma.setting.findFirst()
|
||||
return {
|
||||
applications,
|
||||
settings
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getApplication(request: FastifyRequest<GetApplication>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
const appId = process.env['COOLIFY_APP_ID'];
|
||||
let isRunning = false;
|
||||
let isExited = false;
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId && application.destinationDocker?.engine) {
|
||||
isRunning = await checkContainer(application.destinationDocker.engine, id);
|
||||
isExited = await isContainerExited(application.destinationDocker.engine, id);
|
||||
}
|
||||
return {
|
||||
isQueueActive: scheduler.workers.has('deployApplication'),
|
||||
isRunning,
|
||||
isExited,
|
||||
application,
|
||||
appId
|
||||
};
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function newApplication(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const name = uniqueName();
|
||||
const { teamId } = request.user
|
||||
const { id } = await prisma.application.create({
|
||||
data: {
|
||||
name,
|
||||
teams: { connect: { id: teamId } },
|
||||
settings: { create: { debug: false, previews: false } }
|
||||
}
|
||||
});
|
||||
return reply.code(201).send({ id });
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
function decryptApplication(application: any) {
|
||||
if (application) {
|
||||
if (application?.gitSource?.githubApp?.clientSecret) {
|
||||
application.gitSource.githubApp.clientSecret = decrypt(application.gitSource.githubApp.clientSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.webhookSecret) {
|
||||
application.gitSource.githubApp.webhookSecret = decrypt(application.gitSource.githubApp.webhookSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.privateKey) {
|
||||
application.gitSource.githubApp.privateKey = decrypt(application.gitSource.githubApp.privateKey) || null;
|
||||
}
|
||||
if (application?.gitSource?.gitlabApp?.appSecret) {
|
||||
application.gitSource.gitlabApp.appSecret = decrypt(application.gitSource.gitlabApp.appSecret) || null;
|
||||
}
|
||||
if (application?.secrets.length > 0) {
|
||||
application.secrets = application.secrets.map((s: any) => {
|
||||
s.value = decrypt(s.value) || null
|
||||
return s;
|
||||
});
|
||||
}
|
||||
|
||||
return application;
|
||||
}
|
||||
}
|
||||
export async function getApplicationFromDB(id: string, teamId: string) {
|
||||
try {
|
||||
let application = await prisma.application.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: {
|
||||
destinationDocker: true,
|
||||
settings: true,
|
||||
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||
secrets: true,
|
||||
persistentStorage: true
|
||||
}
|
||||
});
|
||||
if (!application) {
|
||||
throw { status: 404, message: 'Application not found.' };
|
||||
}
|
||||
application = decryptApplication(application);
|
||||
const buildPack = application?.buildPack || null;
|
||||
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
|
||||
buildPack
|
||||
);
|
||||
|
||||
// Set default build images
|
||||
if (!application.baseImage) {
|
||||
application.baseImage = baseImage;
|
||||
}
|
||||
if (!application.baseBuildImage) {
|
||||
application.baseBuildImage = baseBuildImage;
|
||||
}
|
||||
return { ...application, baseBuildImages, baseImages };
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getApplicationFromDBWebhook(projectId: number, branch: string) {
|
||||
try {
|
||||
let application = await prisma.application.findFirst({
|
||||
where: { projectId, branch, settings: { autodeploy: true } },
|
||||
include: {
|
||||
destinationDocker: true,
|
||||
settings: true,
|
||||
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||
secrets: true,
|
||||
persistentStorage: true
|
||||
}
|
||||
});
|
||||
if (!application) {
|
||||
throw { status: 500, message: 'Application not configured.' }
|
||||
}
|
||||
application = decryptApplication(application);
|
||||
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
|
||||
application.buildPack
|
||||
);
|
||||
|
||||
// Set default build images
|
||||
if (!application.baseImage) {
|
||||
application.baseImage = baseImage;
|
||||
}
|
||||
if (!application.baseBuildImage) {
|
||||
application.baseBuildImage = baseBuildImage;
|
||||
}
|
||||
return { ...application, baseBuildImages, baseImages };
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveApplication(request: FastifyRequest<SaveApplication>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let {
|
||||
name,
|
||||
buildPack,
|
||||
fqdn,
|
||||
port,
|
||||
exposePort,
|
||||
installCommand,
|
||||
buildCommand,
|
||||
startCommand,
|
||||
baseDirectory,
|
||||
publishDirectory,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
dockerFileLocation,
|
||||
denoMainFile,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage
|
||||
} = request.body
|
||||
|
||||
if (port) port = Number(port);
|
||||
if (exposePort) {
|
||||
exposePort = Number(exposePort);
|
||||
}
|
||||
if (denoOptions) denoOptions = denoOptions.trim();
|
||||
const defaultConfiguration = await setDefaultConfiguration({
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
denoMainFile
|
||||
});
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: {
|
||||
name,
|
||||
fqdn,
|
||||
exposePort,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
...defaultConfiguration
|
||||
}
|
||||
});
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export async function saveApplicationSettings(request: FastifyRequest<SaveApplicationSettings>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { debug, previews, dualCerts, autodeploy, branch, projectId } = request.body
|
||||
const isDouble = await checkDoubleBranch(branch, projectId);
|
||||
if (isDouble && autodeploy) {
|
||||
throw { status: 500, message: 'Application not configured.' }
|
||||
}
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: { settings: { update: { debug, previews, dualCerts, autodeploy } } },
|
||||
include: { destinationDocker: true }
|
||||
});
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function stopApplication(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId && application.destinationDocker?.engine) {
|
||||
const { engine } = application.destinationDocker;
|
||||
const found = await checkContainer(engine, id);
|
||||
if (found) {
|
||||
await removeContainer({ id, engine });
|
||||
}
|
||||
}
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deleteApplication(request: FastifyRequest<DeleteApplication>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
const application = await prisma.application.findUnique({
|
||||
where: { id },
|
||||
include: { destinationDocker: true }
|
||||
});
|
||||
if (application?.destinationDockerId && application.destinationDocker?.engine && application.destinationDocker?.network) {
|
||||
const host = getEngine(application.destinationDocker.engine);
|
||||
const { stdout: containers } = await asyncExecShell(
|
||||
`DOCKER_HOST=${host} docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
|
||||
);
|
||||
if (containers) {
|
||||
const containersArray = containers.trim().split('\n');
|
||||
for (const container of containersArray) {
|
||||
const containerObj = JSON.parse(container);
|
||||
const id = containerObj.ID;
|
||||
await removeContainer({ id, engine: application.destinationDocker.engine });
|
||||
}
|
||||
}
|
||||
}
|
||||
await prisma.applicationSettings.deleteMany({ where: { application: { id } } });
|
||||
await prisma.buildLog.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.build.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.secret.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
|
||||
if (teamId === '0') {
|
||||
await prisma.application.deleteMany({ where: { id } });
|
||||
} else {
|
||||
await prisma.application.deleteMany({ where: { id, teams: { some: { id: teamId } } } });
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function checkDNS(request: FastifyRequest<CheckDNS>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
|
||||
let { exposePort, fqdn, forceSave, dualCerts } = request.body
|
||||
fqdn = fqdn.toLowerCase();
|
||||
|
||||
const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
|
||||
const found = await isDomainConfigured({ id, fqdn });
|
||||
if (found) {
|
||||
throw { status: 500, message: `Domain ${getDomain(fqdn).replace('www.', '')} is already in use!` }
|
||||
}
|
||||
if (exposePort) {
|
||||
exposePort = Number(exposePort);
|
||||
|
||||
if (exposePort < 1024 || exposePort > 65535) {
|
||||
throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` }
|
||||
}
|
||||
const { default: getPort } = await import('get-port');
|
||||
const publicPort = await getPort({ port: exposePort });
|
||||
if (publicPort !== exposePort) {
|
||||
throw { status: 500, message: `Port ${exposePort} is already in use.` }
|
||||
}
|
||||
}
|
||||
if (isDNSCheckEnabled && !isDev && !forceSave) {
|
||||
return await checkDomainsIsValidInDNS({ hostname: request.hostname, fqdn, dualCerts });
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function getUsage(request) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const teamId = request.user?.teamId;
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
let usage = {};
|
||||
if (application.destinationDockerId) {
|
||||
[usage] = await Promise.all([getContainerUsage(application.destinationDocker.engine, id)]);
|
||||
}
|
||||
return {
|
||||
usage
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deployApplication(request: FastifyRequest<DeployApplication>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const teamId = request.user?.teamId;
|
||||
|
||||
const { pullmergeRequestId = null, branch } = request.body
|
||||
const buildId = cuid();
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
if (application) {
|
||||
if (!application?.configHash) {
|
||||
const configHash = crypto.createHash('sha256')
|
||||
.update(
|
||||
JSON.stringify({
|
||||
buildPack: application.buildPack,
|
||||
port: application.port,
|
||||
exposePort: application.exposePort,
|
||||
installCommand: application.installCommand,
|
||||
buildCommand: application.buildCommand,
|
||||
startCommand: application.startCommand
|
||||
})
|
||||
)
|
||||
.digest('hex');
|
||||
await prisma.application.update({ where: { id }, data: { configHash } });
|
||||
}
|
||||
await prisma.application.update({ where: { id }, data: { updatedAt: new Date() } });
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: id,
|
||||
branch: application.branch,
|
||||
destinationDockerId: application.destinationDocker?.id,
|
||||
gitSourceId: application.gitSource?.id,
|
||||
githubAppId: application.gitSource?.githubApp?.id,
|
||||
gitlabAppId: application.gitSource?.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'manual'
|
||||
}
|
||||
});
|
||||
if (pullmergeRequestId) {
|
||||
scheduler.workers.get('deployApplication').postMessage({
|
||||
build_id: buildId,
|
||||
type: 'manual',
|
||||
...application,
|
||||
sourceBranch: branch,
|
||||
pullmergeRequestId
|
||||
});
|
||||
} else {
|
||||
scheduler.workers.get('deployApplication').postMessage({
|
||||
build_id: buildId,
|
||||
type: 'manual',
|
||||
...application
|
||||
});
|
||||
|
||||
}
|
||||
return {
|
||||
buildId
|
||||
};
|
||||
}
|
||||
throw { status: 500, message: 'Application not found!' }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export async function saveApplicationSource(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { gitSourceId } = request.body
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: { gitSource: { connect: { id: gitSourceId } } }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function getGitHubToken(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
const payload = {
|
||||
iat: Math.round(new Date().getTime() / 1000),
|
||||
exp: Math.round(new Date().getTime() / 1000 + 60),
|
||||
iss: application.gitSource.githubApp.appId
|
||||
};
|
||||
const githubToken = jsonwebtoken.sign(payload, application.gitSource.githubApp.privateKey, {
|
||||
algorithm: 'RS256'
|
||||
});
|
||||
const { data } = await axios.post(`${application.gitSource.apiUrl}/app/installations/${application.gitSource.githubApp.installationId}/access_tokens`, {}, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${githubToken}`
|
||||
}
|
||||
})
|
||||
return reply.code(201).send({
|
||||
token: data.token
|
||||
})
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function checkRepository(request: FastifyRequest) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { repository, branch } = request.query
|
||||
const application = await prisma.application.findUnique({
|
||||
where: { id },
|
||||
include: { gitSource: true }
|
||||
});
|
||||
const found = await prisma.application.findFirst({
|
||||
where: { branch, repository, gitSource: { type: application.gitSource.type }, id: { not: id } }
|
||||
});
|
||||
return {
|
||||
used: found ? true : false
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveRepository(request, reply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { repository, branch, projectId, autodeploy, webhookToken } = request.body
|
||||
|
||||
repository = repository.toLowerCase();
|
||||
branch = branch.toLowerCase();
|
||||
projectId = Number(projectId);
|
||||
if (webhookToken) {
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: { repository, branch, projectId, gitSource: { update: { gitlabApp: { update: { webhookToken: webhookToken ? webhookToken : undefined } } } }, settings: { update: { autodeploy } } }
|
||||
});
|
||||
} else {
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: { repository, branch, projectId, settings: { update: { autodeploy } } }
|
||||
});
|
||||
}
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveDestination(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { destinationId } = request.body
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: { destinationDocker: { connect: { id: destinationId } } }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function getBuildPack(request) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const teamId = request.user?.teamId;
|
||||
const application = await getApplicationFromDB(id, teamId);
|
||||
return {
|
||||
type: application.gitSource.type,
|
||||
projectId: application.projectId,
|
||||
repository: application.repository,
|
||||
branch: application.branch,
|
||||
apiUrl: application.gitSource.apiUrl
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveBuildPack(request, reply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { buildPack } = request.body
|
||||
await prisma.application.update({ where: { id }, data: { buildPack } });
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function getSecrets(request: FastifyRequest) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let secrets = await prisma.secret.findMany({
|
||||
where: { applicationId: id },
|
||||
orderBy: { createdAt: 'desc' }
|
||||
});
|
||||
secrets = secrets.map((secret) => {
|
||||
secret.value = decrypt(secret.value);
|
||||
return secret;
|
||||
});
|
||||
secrets = secrets.filter((secret) => !secret.isPRMRSecret).sort((a, b) => {
|
||||
return ('' + a.name).localeCompare(b.name);
|
||||
})
|
||||
return {
|
||||
secrets
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveSecret(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body
|
||||
|
||||
if (isNew) {
|
||||
const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } });
|
||||
if (found) {
|
||||
throw { status: 500, message: `Secret ${name} already exists.` }
|
||||
} else {
|
||||
value = encrypt(value);
|
||||
await prisma.secret.create({
|
||||
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
value = encrypt(value);
|
||||
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
|
||||
|
||||
if (found) {
|
||||
await prisma.secret.updateMany({
|
||||
where: { applicationId: id, name, isPRMRSecret },
|
||||
data: { value, isBuildSecret, isPRMRSecret }
|
||||
});
|
||||
} else {
|
||||
await prisma.secret.create({
|
||||
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deleteSecret(request: FastifyRequest) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { name } = request.body
|
||||
await prisma.secret.deleteMany({ where: { applicationId: id, name } });
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}

export async function getStorages(request: FastifyRequest) {
	try {
		const { id } = request.params
		const persistentStorages = await prisma.applicationPersistentStorage.findMany({ where: { applicationId: id } });
		return {
			persistentStorages
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function saveStorage(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params
		const { path, newStorage, storageId } = request.body

		if (newStorage) {
			await prisma.applicationPersistentStorage.create({
				data: { path, application: { connect: { id } } }
			});
		} else {
			await prisma.applicationPersistentStorage.update({
				where: { id: storageId },
				data: { path }
			});
		}
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function deleteStorage(request: FastifyRequest) {
	try {
		const { id } = request.params
		const { path } = request.body
		await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id, path } });
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function getPreviews(request: FastifyRequest) {
	try {
		const { id } = request.params
		const { teamId } = request.user
		let secrets = await prisma.secret.findMany({
			where: { applicationId: id },
			orderBy: { createdAt: 'desc' }
		});
		secrets = secrets.map((secret) => {
			secret.value = decrypt(secret.value);
			return secret;
		});
		const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret);
		const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret);
		const destinationDocker = await prisma.destinationDocker.findFirst({
			where: { application: { some: { id } }, teams: { some: { id: teamId } } }
		});
		const docker = dockerInstance({ destinationDocker });
		const listContainers = await docker.engine.listContainers({
			filters: { network: [destinationDocker.network], name: [id] }
		});
		const containers = listContainers.filter((container) => {
			return (
				container.Labels['coolify.configuration'] &&
				container.Labels['coolify.type'] === 'standalone-application'
			);
		});
		const jsonContainers = containers
			.map((container) =>
				JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
			)
			.filter((container) => {
				return container.pullmergeRequestId && container.applicationId === id;
			});
		return {
			containers: jsonContainers,
			applicationSecrets: applicationSecrets.sort((a, b) => {
				return ('' + a.name).localeCompare(b.name);
			}),
			PRMRSecrets: PRMRSecrets.sort((a, b) => {
				return ('' + a.name).localeCompare(b.name);
			})
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
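The preview lookup above relies on each container carrying its configuration base64-encoded in the coolify.configuration label. A small stand-alone sketch of just that decoding step; the parsed shape is an assumption based on the fields the handler reads, and the function name is illustrative only.

	// Minimal sketch: decode the base64 JSON that getPreviews reads from a container label.
	function parseCoolifyLabel(labels: Record<string, string>): { applicationId?: string; pullmergeRequestId?: string } | null {
		const raw = labels['coolify.configuration'];
		if (!raw) return null;
		return JSON.parse(Buffer.from(raw, 'base64').toString());
	}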

export async function getApplicationLogs(request: FastifyRequest) {
	try {
		const { id } = request.params
		let { since = 0 } = request.query
		if (since !== 0) {
			since = day(since).unix();
		}
		const { destinationDockerId, destinationDocker } = await prisma.application.findUnique({
			where: { id },
			include: { destinationDocker: true }
		});
		if (destinationDockerId) {
			const docker = dockerInstance({ destinationDocker });
			try {
				const container = await docker.engine.getContainer(id);
				if (container) {
					const { default: ansi } = await import('strip-ansi')
					const logs = (
						await container.logs({
							stdout: true,
							stderr: true,
							timestamps: true,
							since,
							tail: 5000
						})
					)
						.toString()
						.split('\n')
						.map((l) => ansi(l.slice(8)))
						.filter((a) => a);
					return {
						logs
					};
				}
			} catch (error) {
				return {
					logs: []
				};
			}
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
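A note on the post-processing above: each log line is sliced by 8 bytes, which appears to correspond to the 8-byte header Docker prepends to multiplexed log frames, before strip-ansi removes colour codes. A client can poll this endpoint and pass the last timestamp it saw as `since`, which the handler converts with day(since).unix(). A hedged sketch of such a client; the base URL, port and token are placeholders, not values from this diff.

	// Hypothetical polling client for GET /:id/logs?since=<date>.
	async function fetchApplicationLogs(id: string, token: string, since?: string): Promise<string[]> {
		const url = new URL(`/api/v1/applications/${id}/logs`, 'http://localhost:3000'); // prefix inferred from the file path
		if (since) url.searchParams.set('since', since);
		const res = await fetch(url, { headers: { Authorization: `Bearer ${token}` } });
		const { logs } = await res.json();
		return logs ?? [];
	}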
export async function getBuildLogs(request: FastifyRequest) {
	try {
		const { id } = request.params
		let { buildId, skip = 0 } = request.query
		if (typeof skip !== 'number') {
			skip = Number(skip)
		}

		let builds = [];

		const buildCount = await prisma.build.count({ where: { applicationId: id } });
		if (buildId) {
			builds = await prisma.build.findMany({ where: { applicationId: id, id: buildId } });
		} else {
			builds = await prisma.build.findMany({
				where: { applicationId: id },
				orderBy: { createdAt: 'desc' },
				take: 5,
				skip
			});
		}

		builds = builds.map((build) => {
			const updatedAt = day(build.updatedAt).utc();
			build.took = updatedAt.diff(day(build.createdAt)) / 1000;
			build.since = updatedAt.fromNow();
			return build;
		});
		return {
			builds,
			buildCount
		};
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function getBuildIdLogs(request: FastifyRequest) {
	try {
		const { id, buildId } = request.params
		let { sequence = 0 } = request.query
		if (typeof sequence !== 'number') {
			sequence = Number(sequence)
		}
		let logs = await prisma.buildLog.findMany({
			where: { buildId, time: { gt: sequence } },
			orderBy: { time: 'asc' }
		});
		const data = await prisma.build.findFirst({ where: { id: buildId } });
		return {
			logs,
			status: data?.status || 'queued'
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function getGitLabSSHKey(request: FastifyRequest) {
	try {
		const { id } = request.params
		const application = await prisma.application.findUnique({
			where: { id },
			include: { gitSource: { include: { gitlabApp: true } } }
		});
		return { publicKey: application.gitSource.gitlabApp.publicSshKey };
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function saveGitLabSSHKey(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params
		const application = await prisma.application.findUnique({
			where: { id },
			include: { gitSource: { include: { gitlabApp: true } } }
		});
		if (!application.gitSource?.gitlabApp?.privateSshKey) {
			const keys = await generateSshKeyPair();
			const encryptedPrivateKey = encrypt(keys.privateKey);
			await prisma.gitlabApp.update({
				where: { id: application.gitSource.gitlabApp.id },
				data: { privateSshKey: encryptedPrivateKey, publicSshKey: keys.publicKey }
			});
			return reply.code(201).send({ publicKey: keys.publicKey })
		}
		return { message: 'SSH key already exists' }
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function saveDeployKey(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params
		let { deployKeyId } = request.body;

		deployKeyId = Number(deployKeyId);
		const application = await prisma.application.findUnique({
			where: { id },
			include: { gitSource: { include: { gitlabApp: true } } }
		});
		await prisma.gitlabApp.update({
			where: { id: application.gitSource.gitlabApp.id },
			data: { deployKeyId }
		});
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

export async function cancelDeployment(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params
		const { buildId, applicationId } = request.body;
		if (!buildId) {
			throw { status: 500, message: 'buildId is required' }
		}
		await stopBuild(buildId, applicationId);
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
apps/api/src/routes/api/v1/applications/index.ts (new file, 89 lines)
@@ -0,0 +1,89 @@
import { FastifyPluginAsync } from 'fastify';
import { cancelDeployment, checkDNS, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication } from './handlers';

export interface GetApplication {
	Params: { id: string; }
}

export interface SaveApplication {
	Params: { id: string; },
	Body: any
}

export interface SaveApplicationSettings {
	Params: { id: string; };
	Querystring: { domain: string; };
	Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; };
}

export interface DeleteApplication {
	Params: { id: string; };
	Querystring: { domain: string; };
}

export interface CheckDNS {
	Params: { id: string; };
	Querystring: { domain: string; };
}

export interface DeployApplication {
	Params: { id: string },
	Querystring: { domain: string }
	Body: { pullmergeRequestId: string | null, branch: string }
}

const root: FastifyPluginAsync = async (fastify, opts): Promise<void> => {
	fastify.addHook('onRequest', async (request, reply) => {
		return await request.jwtVerify()
	})
	fastify.get('/', async (request) => await listApplications(request));

	fastify.post('/new', async (request, reply) => await newApplication(request, reply));

	fastify.get<GetApplication>('/:id', async (request) => await getApplication(request));
	fastify.post<SaveApplication>('/:id', async (request, reply) => await saveApplication(request, reply));
	fastify.delete<DeleteApplication>('/:id', async (request, reply) => await deleteApplication(request, reply));

	fastify.post('/:id/stop', async (request, reply) => await stopApplication(request, reply));

	fastify.post<SaveApplicationSettings>('/:id/settings', async (request, reply) => await saveApplicationSettings(request, reply));
	fastify.post<SaveApplicationSettings>('/:id/check', async (request) => await checkDNS(request));

	fastify.get('/:id/secrets', async (request) => await getSecrets(request));
	fastify.post('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
	fastify.delete('/:id/secrets', async (request) => await deleteSecret(request));

	fastify.get('/:id/storages', async (request) => await getStorages(request));
	fastify.post('/:id/storages', async (request, reply) => await saveStorage(request, reply));
	fastify.delete('/:id/storages', async (request) => await deleteStorage(request));

	fastify.get('/:id/previews', async (request) => await getPreviews(request));

	fastify.get('/:id/logs', async (request) => await getApplicationLogs(request));
	fastify.get('/:id/logs/build', async (request) => await getBuildLogs(request));
	fastify.get('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));

	fastify.get<DeployApplication>('/:id/usage', async (request) => await getUsage(request))

	fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
	fastify.post('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));

	fastify.post('/:id/configuration/source', async (request, reply) => await saveApplicationSource(request, reply));

	fastify.get('/:id/configuration/repository', async (request) => await checkRepository(request));
	fastify.post('/:id/configuration/repository', async (request, reply) => await saveRepository(request, reply));
	fastify.post('/:id/configuration/destination', async (request, reply) => await saveDestination(request, reply));
	fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
	fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));

	fastify.get('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
	fastify.post('/:id/configuration/sshkey', async (request, reply) => await saveGitLabSSHKey(request, reply));

	fastify.post('/:id/configuration/deploykey', async (request, reply) => await saveDeployKey(request, reply));


	fastify.get('/:id/configuration/githubToken', async (request, reply) => await getGitHubToken(request, reply));
};

export default root;
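Because the plugin above verifies a JWT on every request via the onRequest hook, a client has to send a bearer token. A minimal usage sketch against the secrets route registered above; the /api/v1/applications prefix is inferred from the file path, and the host, port and token are assumptions rather than values from this diff.

	// Hypothetical call to the GET /:id/secrets route registered above (Node 18+ global fetch).
	async function listApplicationSecrets(applicationId: string, token: string) {
		const res = await fetch(`http://localhost:3000/api/v1/applications/${applicationId}/secrets`, {
			headers: { Authorization: `Bearer ${token}` }
		});
		if (!res.ok) throw new Error(`Request failed: ${res.status}`);
		return res.json(); // { secrets: [...] } as returned by getSecrets
	}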
apps/api/src/routes/api/v1/databases/handlers.ts (new file, 471 lines)
@@ -0,0 +1,471 @@
|
||||
import cuid from 'cuid';
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import yaml from 'js-yaml';
|
||||
import fs from 'fs/promises';
|
||||
import { asyncExecShell, ComposeFile, createDirectories, decrypt, encrypt, errorHandler, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePort, listSettings, makeLabelForStandaloneDatabase, prisma, startTcpProxy, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
|
||||
import { dockerInstance, getEngine } from '../../../../lib/docker';
|
||||
import { day } from '../../../../lib/dayjs';
|
||||
|
||||
export async function listDatabases(request: FastifyRequest) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
let databases = []
|
||||
if (teamId === '0') {
|
||||
databases = await prisma.database.findMany({ include: { teams: true } });
|
||||
} else {
|
||||
databases = await prisma.database.findMany({
|
||||
where: { teams: { some: { id: teamId } } },
|
||||
include: { teams: true }
|
||||
});
|
||||
}
|
||||
return {
|
||||
databases
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function newDatabase(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
|
||||
const name = uniqueName();
|
||||
const dbUser = cuid();
|
||||
const dbUserPassword = encrypt(generatePassword());
|
||||
const rootUser = cuid();
|
||||
const rootUserPassword = encrypt(generatePassword());
|
||||
const defaultDatabase = cuid();
|
||||
|
||||
const { id } = await prisma.database.create({
|
||||
data: {
|
||||
name,
|
||||
defaultDatabase,
|
||||
dbUser,
|
||||
dbUserPassword,
|
||||
rootUser,
|
||||
rootUserPassword,
|
||||
teams: { connect: { id: teamId } },
|
||||
settings: { create: { isPublic: false } }
|
||||
}
|
||||
});
|
||||
return reply.code(201).send({ id })
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getDatabase(request: FastifyRequest) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const teamId = request.user.teamId;
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (!database) {
|
||||
throw { status: 404, message: 'Database not found.' }
|
||||
}
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
const { destinationDockerId, destinationDocker } = database;
|
||||
let isRunning = false;
|
||||
if (destinationDockerId) {
|
||||
const host = getEngine(destinationDocker.engine);
|
||||
|
||||
try {
|
||||
const { stdout } = await asyncExecShell(
|
||||
`DOCKER_HOST=${host} docker inspect --format '{{json .State}}' ${id}`
|
||||
);
|
||||
|
||||
if (JSON.parse(stdout).Running) {
|
||||
isRunning = true;
|
||||
}
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
}
|
||||
const configuration = generateDatabaseConfiguration(database);
|
||||
const settings = await listSettings();
|
||||
return {
|
||||
privatePort: configuration?.privatePort,
|
||||
database,
|
||||
isRunning,
|
||||
versions: await getDatabaseVersions(database.type),
|
||||
settings
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getDatabaseTypes(request: FastifyRequest) {
|
||||
try {
|
||||
return {
|
||||
types: supportedDatabaseTypesAndVersions
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveDatabaseType(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const { type } = request.body;
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: { type }
|
||||
});
|
||||
return reply.code(201).send({})
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getVersions(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
const { type } = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
return {
|
||||
versions: supportedDatabaseTypesAndVersions.find((name) => name.name === type).versions
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveVersion(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const { version } = request.body;
|
||||
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: {
|
||||
version,
|
||||
|
||||
}
|
||||
});
|
||||
return reply.code(201).send({})
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveDatabaseDestination(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const { destinationId } = request.body;
|
||||
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: { destinationDocker: { connect: { id: destinationId } } }
|
||||
});
|
||||
|
||||
const {
|
||||
destinationDockerId,
|
||||
destinationDocker: { engine },
|
||||
version,
|
||||
type
|
||||
} = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } });
|
||||
|
||||
if (destinationDockerId) {
|
||||
const host = getEngine(engine);
|
||||
if (type && version) {
|
||||
const baseImage = getDatabaseImage(type);
|
||||
asyncExecShell(`DOCKER_HOST=${host} docker pull ${baseImage}:${version}`);
|
||||
}
|
||||
}
|
||||
return reply.code(201).send({})
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getDatabaseUsage(request: FastifyRequest) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const teamId = request.user.teamId;
|
||||
let usage = {};
|
||||
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
if (database.destinationDockerId) {
|
||||
[usage] = await Promise.all([getContainerUsage(database.destinationDocker.engine, id)]);
|
||||
}
|
||||
return {
|
||||
usage
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function startDatabase(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
const {
|
||||
type,
|
||||
destinationDockerId,
|
||||
destinationDocker,
|
||||
publicPort,
|
||||
settings: { isPublic }
|
||||
} = database;
|
||||
const { privatePort, environmentVariables, image, volume, ulimits } =
|
||||
generateDatabaseConfiguration(database);
|
||||
|
||||
const network = destinationDockerId && destinationDocker.network;
|
||||
const host = getEngine(destinationDocker.engine);
|
||||
const engine = destinationDocker.engine;
|
||||
const volumeName = volume.split(':')[0];
|
||||
const labels = await makeLabelForStandaloneDatabase({ id, image, volume });
|
||||
|
||||
const { workdir } = await createDirectories({ repository: type, buildId: id });
|
||||
|
||||
const composeFile: ComposeFile = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[id]: {
|
||||
container_name: id,
|
||||
image,
|
||||
networks: [network],
|
||||
environment: environmentVariables,
|
||||
volumes: [volume],
|
||||
ulimits,
|
||||
labels,
|
||||
restart: 'always',
|
||||
deploy: {
|
||||
restart_policy: {
|
||||
condition: 'on-failure',
|
||||
delay: '5s',
|
||||
max_attempts: 3,
|
||||
window: '120s'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: {
|
||||
[volumeName]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
};
|
||||
const composeFileDestination = `${workdir}/docker-compose.yaml`;
|
||||
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
|
||||
try {
|
||||
await asyncExecShell(`DOCKER_HOST=${host} docker volume create ${volumeName}`);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
try {
|
||||
await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} up -d`);
|
||||
if (isPublic) await startTcpProxy(destinationDocker, id, publicPort, privatePort);
|
||||
return {};
|
||||
} catch (error) {
|
||||
throw {
|
||||
error
|
||||
};
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function stopDatabase(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
const everStarted = await stopDatabaseContainer(database);
|
||||
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: {
|
||||
settings: { upsert: { update: { isPublic: false }, create: { isPublic: false } } }
|
||||
}
|
||||
});
|
||||
await prisma.database.update({ where: { id }, data: { publicPort: null } });
|
||||
return {};
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getDatabaseLogs(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
let { since = 0 } = request.query
|
||||
if (since !== 0) {
|
||||
since = day(since).unix();
|
||||
}
|
||||
const { destinationDockerId, destinationDocker } = await prisma.database.findUnique({
|
||||
where: { id },
|
||||
include: { destinationDocker: true }
|
||||
});
|
||||
if (destinationDockerId) {
|
||||
const docker = dockerInstance({ destinationDocker });
|
||||
try {
|
||||
const container = await docker.engine.getContainer(id);
|
||||
if (container) {
|
||||
const { default: ansi } = await import('strip-ansi')
|
||||
const logs = (
|
||||
await container.logs({
|
||||
stdout: true,
|
||||
stderr: true,
|
||||
timestamps: true,
|
||||
since,
|
||||
tail: 5000
|
||||
})
|
||||
)
|
||||
.toString()
|
||||
.split('\n')
|
||||
.map((l) => ansi(l.slice(8)))
|
||||
.filter((a) => a);
|
||||
return {
|
||||
logs
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
const { statusCode } = error;
|
||||
if (statusCode === 404) {
|
||||
return {
|
||||
logs: []
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
return {
|
||||
message: 'No logs found.'
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deleteDatabase(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
if (database.destinationDockerId) {
|
||||
const everStarted = await stopDatabaseContainer(database);
|
||||
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
|
||||
}
|
||||
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
|
||||
await prisma.database.delete({ where: { id } });
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveDatabase(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
const {
|
||||
name,
|
||||
defaultDatabase,
|
||||
dbUser,
|
||||
dbUserPassword,
|
||||
rootUser,
|
||||
rootUserPassword,
|
||||
version,
|
||||
isRunning
|
||||
} = request.body;
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
if (isRunning) {
|
||||
if (database.dbUserPassword !== dbUserPassword) {
|
||||
await updatePasswordInDb(database, dbUser, dbUserPassword, false);
|
||||
} else if (database.rootUserPassword !== rootUserPassword) {
|
||||
await updatePasswordInDb(database, rootUser, rootUserPassword, true);
|
||||
}
|
||||
}
|
||||
const encryptedDbUserPassword = dbUserPassword && encrypt(dbUserPassword);
|
||||
const encryptedRootUserPassword = rootUserPassword && encrypt(rootUserPassword);
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: {
|
||||
name,
|
||||
defaultDatabase,
|
||||
dbUser,
|
||||
dbUserPassword: encryptedDbUserPassword,
|
||||
rootUser,
|
||||
rootUserPassword: encryptedRootUserPassword,
|
||||
version
|
||||
}
|
||||
});
|
||||
return reply.code(201).send({})
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveDatabaseSettings(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
const { isPublic, appendOnly = true } = request.body;
|
||||
const publicPort = await getFreePort();
|
||||
const settings = await listSettings();
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: {
|
||||
settings: { upsert: { update: { isPublic, appendOnly }, create: { isPublic, appendOnly } } }
|
||||
}
|
||||
});
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
|
||||
const { destinationDockerId, destinationDocker, publicPort: oldPublicPort } = database;
|
||||
const { privatePort } = generateDatabaseConfiguration(database);
|
||||
|
||||
if (destinationDockerId) {
|
||||
if (isPublic) {
|
||||
await prisma.database.update({ where: { id }, data: { publicPort } });
|
||||
if (settings.isTraefikUsed) {
|
||||
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
|
||||
} else {
|
||||
await startTcpProxy(destinationDocker, id, publicPort, privatePort);
|
||||
}
|
||||
} else {
|
||||
await prisma.database.update({ where: { id }, data: { publicPort: null } });
|
||||
await stopTcpHttpProxy(id, destinationDocker, oldPublicPort);
|
||||
}
|
||||
}
|
||||
return { publicPort }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
apps/api/src/routes/api/v1/databases/index.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
import { FastifyPluginAsync } from 'fastify';
import { deleteDatabase, getDatabase, getDatabaseLogs, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';

const root: FastifyPluginAsync = async (fastify, opts): Promise<void> => {
	fastify.addHook('onRequest', async (request, reply) => {
		return await request.jwtVerify()
	})
	fastify.get('/', async (request) => await listDatabases(request));
	fastify.post('/new', async (request, reply) => await newDatabase(request, reply));

	fastify.get('/:id', async (request) => await getDatabase(request));
	fastify.post('/:id', async (request, reply) => await saveDatabase(request, reply));
	fastify.delete('/:id', async (request) => await deleteDatabase(request));

	fastify.post('/:id/settings', async (request) => await saveDatabaseSettings(request));

	fastify.get('/:id/configuration/type', async (request) => await getDatabaseTypes(request));
	fastify.post('/:id/configuration/type', async (request, reply) => await saveDatabaseType(request, reply));

	fastify.get('/:id/configuration/version', async (request) => await getVersions(request));
	fastify.post('/:id/configuration/version', async (request, reply) => await saveVersion(request, reply));

	fastify.post('/:id/configuration/destination', async (request, reply) => await saveDatabaseDestination(request, reply));

	fastify.get('/:id/usage', async (request) => await getDatabaseUsage(request));
	fastify.get('/:id/logs', async (request) => await getDatabaseLogs(request));

	fastify.post('/:id/start', async (request) => await startDatabase(request));
	fastify.post('/:id/stop', async (request) => await stopDatabase(request));
};

export default root;
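The route order above mirrors the configuration flow for a freshly created database: pick a type, pick a version, attach a destination, then start it. A hedged sketch of that sequence from a client's point of view; the api() wrapper, the /api/v1/databases prefix (inferred from the file path) and the example values 'postgresql', '14' and the destination id are all placeholders, while the body field names (type, version, destinationId) come from the handlers earlier in this diff.

	// Hypothetical client walking the configuration routes registered above, in order.
	async function configureAndStartDatabase(
		api: (method: string, path: string, body?: unknown) => Promise<unknown>, // stand-in for an authenticated fetch wrapper
		id: string
	) {
		await api('POST', `/api/v1/databases/${id}/configuration/type`, { type: 'postgresql' });
		await api('POST', `/api/v1/databases/${id}/configuration/version`, { version: '14' });
		await api('POST', `/api/v1/databases/${id}/configuration/destination`, { destinationId: '<destination id>' });
		await api('POST', `/api/v1/databases/${id}/start`);
	}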
apps/api/src/routes/api/v1/destinations/handlers.ts (new file, 200 lines)
@@ -0,0 +1,200 @@
import type { FastifyRequest } from 'fastify';
import { FastifyReply } from 'fastify';
import { asyncExecShell, errorHandler, listSettings, prisma, startCoolifyProxy, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
import { checkContainer, dockerInstance, getEngine } from '../../../../lib/docker';

export async function listDestinations(request: FastifyRequest) {
	try {
		const teamId = request.user.teamId;
		let destinations = []
		if (teamId === '0') {
			destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
		} else {
			destinations = await prisma.destinationDocker.findMany({
				where: { teams: { some: { id: teamId } } },
				include: { teams: true }
			});
		}
		return {
			destinations
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function checkDestination(request: FastifyRequest) {
	try {
		const { network } = request.body;
		const found = await prisma.destinationDocker.findFirst({ where: { network } });
		if (found) {
			throw {
				message: `Network already exists: ${network}`
			};
		}
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function getDestination(request: FastifyRequest) {
	try {
		const { id } = request.params
		const teamId = request.user?.teamId;
		const destination = await prisma.destinationDocker.findFirst({
			where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } }
		});
		if (!destination) {
			throw { status: 404, message: `Destination not found.` };
		}
		const settings = await listSettings();
		let payload = {
			destination,
			settings,
			state: false
		};

		if (destination?.remoteEngine) {
			// const { stdout } = await asyncExecShell(
			// 	`ssh -p ${destination.port} ${destination.user}@${destination.ipAddress} "docker ps -a"`
			// );
			// console.log(stdout)
			// const engine = await generateRemoteEngine(destination);
			// // await saveSshKey(destination);
			// payload.state = await checkContainer(engine, 'coolify-haproxy');
		} else {
			let containerName = 'coolify-proxy';
			if (!settings.isTraefikUsed) {
				containerName = 'coolify-haproxy';
			}
			payload.state =
				destination?.engine && (await checkContainer(destination.engine, containerName));
		}
		return {
			...payload
		};

	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function newDestination(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params
		let { name, network, engine, isCoolifyProxyUsed } = request.body
		const teamId = request.user.teamId;
		if (id === 'new') {
			const host = getEngine(engine);
			const docker = dockerInstance({ destinationDocker: { engine, network } });
			const found = await docker.engine.listNetworks({ filters: { name: [`^${network}$`] } });
			if (found.length === 0) {
				await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
			}
			await prisma.destinationDocker.create({
				data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
			});
			const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
			const destination = destinations.find((destination) => destination.network === network);

			if (destinations.length > 0) {
				const proxyConfigured = destinations.find(
					(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
				);
				if (proxyConfigured) {
					isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
				}
				await prisma.destinationDocker.updateMany({ where: { engine }, data: { isCoolifyProxyUsed } });
			}
			if (isCoolifyProxyUsed) {
				const settings = await prisma.setting.findFirst();
				if (settings?.isTraefikUsed) {
					await startTraefikProxy(engine);
				} else {
					await startCoolifyProxy(engine);
				}
			}
			return reply.code(201).send({ id: destination.id });
		} else {
			await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
			return reply.code(201).send();
		}

	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function deleteDestination(request: FastifyRequest) {
	try {
		const { id } = request.params
		const destination = await prisma.destinationDocker.delete({ where: { id } });
		if (destination.isCoolifyProxyUsed) {
			const host = getEngine(destination.engine);
			const { network } = destination;
			const settings = await prisma.setting.findFirst();
			const containerName = settings.isTraefikUsed ? 'coolify-proxy' : 'coolify-haproxy';
			const { stdout: found } = await asyncExecShell(
				`DOCKER_HOST=${host} docker ps -a --filter network=${network} --filter name=${containerName} --format '{{.}}'`
			);
			if (found) {
				await asyncExecShell(
					`DOCKER_HOST="${host}" docker network disconnect ${network} ${containerName}`
				);
				await asyncExecShell(`DOCKER_HOST="${host}" docker network rm ${network}`);
			}
		}
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function saveDestinationSettings(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { engine, isCoolifyProxyUsed } = request.body;
		await prisma.destinationDocker.updateMany({
			where: { engine },
			data: { isCoolifyProxyUsed }
		});

		return {
			status: 202
		}
		// return reply.code(201).send();
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function startProxy(request: FastifyRequest, reply: FastifyReply) {
	const { engine } = request.body;
	try {
		await startTraefikProxy(engine);
		return {}
	} catch ({ status, message }) {
		await stopTraefikProxy(engine);
		return errorHandler({ status, message })
	}
}
export async function stopProxy(request: FastifyRequest, reply: FastifyReply) {
	const settings = await prisma.setting.findFirst({});
	const { engine } = request.body;
	try {
		await stopTraefikProxy(engine);
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function restartProxy(request: FastifyRequest, reply: FastifyReply) {
	const settings = await prisma.setting.findFirst({});
	const { engine } = request.body;
	try {
		await stopTraefikProxy(engine);
		await startTraefikProxy(engine);
		await prisma.destinationDocker.updateMany({
			where: { engine },
			data: { isCoolifyProxyUsed: true }
		});
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
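getDestination and deleteDestination above both derive the proxy container name from the isTraefikUsed setting. A tiny helper expressing that rule; the function name is illustrative only and does not exist in the diff.

	// Illustrative only: the naming rule used inline in the handlers above.
	function proxyContainerName(isTraefikUsed: boolean): 'coolify-proxy' | 'coolify-haproxy' {
		return isTraefikUsed ? 'coolify-proxy' : 'coolify-haproxy';
	}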
apps/api/src/routes/api/v1/destinations/index.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { FastifyPluginAsync } from 'fastify';
import { checkDestination, deleteDestination, getDestination, listDestinations, newDestination, restartProxy, saveDestinationSettings, startProxy, stopProxy } from './handlers';

const root: FastifyPluginAsync = async (fastify, opts): Promise<void> => {
	fastify.addHook('onRequest', async (request, reply) => {
		return await request.jwtVerify()
	})
	fastify.get('/', async (request) => await listDestinations(request));
	fastify.post('/check', async (request) => await checkDestination(request));

	fastify.get('/:id', async (request) => await getDestination(request));
	fastify.post('/:id', async (request, reply) => await newDestination(request, reply));
	fastify.delete('/:id', async (request) => await deleteDestination(request));

	fastify.post('/:id/settings', async (request, reply) => await saveDestinationSettings(request, reply));
	fastify.post('/:id/start', async (request, reply) => await startProxy(request, reply));
	fastify.post('/:id/stop', async (request, reply) => await stopProxy(request, reply));
	fastify.post('/:id/restart', async (request, reply) => await restartProxy(request, reply));
};

export default root;
apps/api/src/routes/api/v1/handlers.ts (new file, 280 lines)
@@ -0,0 +1,280 @@
|
||||
import os from 'node:os';
|
||||
import osu from 'node-os-utils';
|
||||
import axios from 'axios';
|
||||
import compare from 'compare-versions';
|
||||
import cuid from 'cuid';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import { asyncExecShell, asyncSleep, errorHandler, isDev, prisma, uniqueName, version } from '../../../lib/common';
|
||||
|
||||
import type { FastifyReply, FastifyRequest } from 'fastify';
|
||||
import type { Login, Update } from '.';
|
||||
|
||||
|
||||
export async function hashPassword(password: string): Promise<string> {
|
||||
const saltRounds = 15;
|
||||
return bcrypt.hash(password, saltRounds);
|
||||
}
|
||||
|
||||
|
||||
export async function checkUpdate(request: FastifyRequest) {
|
||||
try {
|
||||
const currentVersion = version;
|
||||
const { data: versions } = await axios.get(
|
||||
`https://get.coollabs.io/versions.json?appId=${process.env['COOLIFY_APP_ID']}&version=${currentVersion}`
|
||||
);
|
||||
const latestVersion =
|
||||
request.hostname === 'staging.coolify.io'
|
||||
? versions['coolify'].next.version
|
||||
: versions['coolify'].main.version;
|
||||
const isUpdateAvailable = compare(latestVersion, currentVersion);
|
||||
return {
|
||||
isUpdateAvailable: isUpdateAvailable === 1,
|
||||
latestVersion
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function update(request: FastifyRequest<Update>) {
|
||||
const { latestVersion } = request.body;
|
||||
try {
|
||||
if (!isDev) {
|
||||
const { isAutoUpdateEnabled } = (await prisma.setting.findFirst()) || {
|
||||
isAutoUpdateEnabled: false
|
||||
};
|
||||
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
|
||||
await asyncExecShell(`env | grep COOLIFY > .env`);
|
||||
await asyncExecShell(
|
||||
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
|
||||
);
|
||||
await asyncExecShell(
|
||||
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
|
||||
);
|
||||
return {};
|
||||
} else {
|
||||
console.log(latestVersion);
|
||||
await asyncSleep(2000);
|
||||
return {};
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function showUsage() {
|
||||
try {
|
||||
return {
|
||||
usage: {
|
||||
uptime: os.uptime(),
|
||||
memory: await osu.mem.info(),
|
||||
cpu: {
|
||||
load: os.loadavg(),
|
||||
usage: await osu.cpu.usage(),
|
||||
count: os.cpus().length
|
||||
},
|
||||
disk: await osu.drive.info('/')
|
||||
}
|
||||
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function showDashboard(request: FastifyRequest) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
const applicationsCount = await prisma.application.count({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }
|
||||
});
|
||||
const sourcesCount = await prisma.gitSource.count({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }
|
||||
});
|
||||
const destinationsCount = await prisma.destinationDocker.count({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }
|
||||
});
|
||||
const teamsCount = await prisma.permission.count({ where: { userId } });
|
||||
const databasesCount = await prisma.database.count({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }
|
||||
});
|
||||
const servicesCount = await prisma.service.count({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }
|
||||
});
|
||||
const teams = await prisma.permission.findMany({
|
||||
where: { userId },
|
||||
include: { team: { include: { _count: { select: { users: true } } } } }
|
||||
});
|
||||
return {
|
||||
teams,
|
||||
applicationsCount,
|
||||
sourcesCount,
|
||||
destinationsCount,
|
||||
teamsCount,
|
||||
databasesCount,
|
||||
servicesCount,
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function login(request: FastifyRequest<Login>, reply: FastifyReply) {
|
||||
if (request.user) {
|
||||
return reply.redirect('/dashboard');
|
||||
} else {
|
||||
const { email, password, isLogin } = request.body || {};
|
||||
if (!email || !password) {
|
||||
throw { status: 500, message: 'Email and password are required.' };
|
||||
}
|
||||
const users = await prisma.user.count();
|
||||
const userFound = await prisma.user.findUnique({
|
||||
where: { email },
|
||||
include: { teams: true, permission: true },
|
||||
rejectOnNotFound: false
|
||||
});
|
||||
if (!userFound && isLogin) {
|
||||
throw { status: 500, message: 'User not found.' };
|
||||
}
|
||||
const { isRegistrationEnabled, id } = await prisma.setting.findFirst()
|
||||
let uid = cuid();
|
||||
let permission = 'read';
|
||||
let isAdmin = false;
|
||||
|
||||
if (users === 0) {
|
||||
await prisma.setting.update({ where: { id }, data: { isRegistrationEnabled: false } });
|
||||
uid = '0';
|
||||
}
|
||||
if (userFound) {
|
||||
if (userFound.type === 'email') {
|
||||
// TODO: Review this one
|
||||
if (userFound.password === 'RESETME') {
|
||||
const hashedPassword = await hashPassword(password);
|
||||
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: 'RESETTIMEOUT' }
|
||||
});
|
||||
throw {
|
||||
status: 500,
|
||||
message: 'Password reset link has expired. Please request a new one.'
|
||||
};
|
||||
} else {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: hashedPassword }
|
||||
});
|
||||
return {
|
||||
userId: userFound.id,
|
||||
teamId: userFound.id,
|
||||
permission: userFound.permission,
|
||||
isAdmin: true
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const passwordMatch = await bcrypt.compare(password, userFound.password);
|
||||
if (!passwordMatch) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: 'Wrong password or email address.'
|
||||
};
|
||||
}
|
||||
uid = userFound.id;
|
||||
isAdmin = true;
|
||||
}
|
||||
} else {
|
||||
permission = 'owner';
|
||||
isAdmin = true;
|
||||
if (!isRegistrationEnabled) {
|
||||
throw {
|
||||
status: 404,
|
||||
message: 'Registration disabled by administrator.'
|
||||
};
|
||||
}
|
||||
const hashedPassword = await hashPassword(password);
|
||||
if (users === 0) {
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: 'email',
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: uniqueName(),
|
||||
destinationDocker: { connect: { network: 'coolify' } }
|
||||
}
|
||||
},
|
||||
permission: { create: { teamId: uid, permission: 'owner' } }
|
||||
},
|
||||
include: { teams: true }
|
||||
});
|
||||
} else {
|
||||
await prisma.user.create({
|
||||
data: {
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: 'email',
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: uniqueName()
|
||||
}
|
||||
},
|
||||
permission: { create: { teamId: uid, permission: 'owner' } }
|
||||
},
|
||||
include: { teams: true }
|
||||
});
|
||||
}
|
||||
}
|
||||
return {
|
||||
userId: uid,
|
||||
teamId: uid,
|
||||
permission,
|
||||
isAdmin
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function getCurrentUser(request: FastifyRequest, fastify) {
|
||||
let token = null
|
||||
|
||||
try {
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: request.user.userId }
|
||||
})
|
||||
if (!user) {
|
||||
throw "User not found";
|
||||
}
|
||||
} catch (error) {
|
||||
throw { status: 401, message: error };
|
||||
}
|
||||
if (request.query.teamId) {
|
||||
try {
|
||||
const user = await prisma.user.findFirst({
|
||||
where: { id: request.user.userId, teams: { some: { id: request.query.teamId } } },
|
||||
include: { teams: true, permission: true }
|
||||
})
|
||||
if (user) {
|
||||
const payload = {
|
||||
...request.user,
|
||||
teamId: request.query.teamId,
|
||||
permission: user.permission.find(p => p.teamId === request.query.teamId).permission || null,
|
||||
isAdmin: user.permission.find(p => p.teamId === request.query.teamId).permission === 'owner'
|
||||
|
||||
}
|
||||
token = fastify.jwt.sign(payload)
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
// No new token -> not switching teams
|
||||
}
|
||||
}
|
||||
return {
|
||||
settings: await prisma.setting.findFirst(),
|
||||
token,
|
||||
...request.user
|
||||
}
|
||||
}
|
||||
apps/api/src/routes/api/v1/iam/handlers.ts (new file, 453 lines)
@@ -0,0 +1,453 @@
|
||||
import cuid from 'cuid';
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import { decrypt, errorHandler, prisma, uniqueName } from '../../../../lib/common';
|
||||
import { day } from '../../../../lib/dayjs';
|
||||
export async function listTeams(request: FastifyRequest) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
const account = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { id: true, email: true, teams: true }
|
||||
});
|
||||
let accounts = [];
|
||||
let allTeams = [];
|
||||
if (teamId === '0') {
|
||||
accounts = await prisma.user.findMany({ select: { id: true, email: true, teams: true } });
|
||||
allTeams = await prisma.team.findMany({
|
||||
where: { users: { none: { id: userId } } },
|
||||
include: { permissions: true }
|
||||
});
|
||||
}
|
||||
const ownTeams = await prisma.team.findMany({
|
||||
where: { users: { some: { id: userId } } },
|
||||
include: { permissions: true }
|
||||
});
|
||||
const invitations = await prisma.teamInvitation.findMany({ where: { uid: userId } });
|
||||
return {
|
||||
ownTeams,
|
||||
allTeams,
|
||||
invitations,
|
||||
account,
|
||||
accounts
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deleteTeam(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const { id } = request.params;
|
||||
|
||||
const aloneInTeams = await prisma.team.findMany({ where: { users: { every: { id: userId } }, id } });
|
||||
if (aloneInTeams.length > 0) {
|
||||
for (const team of aloneInTeams) {
|
||||
const applications = await prisma.application.findMany({
|
||||
where: { teams: { every: { id: team.id } } }
|
||||
});
|
||||
if (applications.length > 0) {
|
||||
for (const application of applications) {
|
||||
await prisma.application.update({
|
||||
where: { id: application.id },
|
||||
data: { teams: { connect: { id: '0' } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
const services = await prisma.service.findMany({
|
||||
where: { teams: { every: { id: team.id } } }
|
||||
});
|
||||
if (services.length > 0) {
|
||||
for (const service of services) {
|
||||
await prisma.service.update({
|
||||
where: { id: service.id },
|
||||
data: { teams: { connect: { id: '0' } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
const databases = await prisma.database.findMany({
|
||||
where: { teams: { every: { id: team.id } } }
|
||||
});
|
||||
if (databases.length > 0) {
|
||||
for (const database of databases) {
|
||||
await prisma.database.update({
|
||||
where: { id: database.id },
|
||||
data: { teams: { connect: { id: '0' } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
const sources = await prisma.gitSource.findMany({
|
||||
where: { teams: { every: { id: team.id } } }
|
||||
});
|
||||
if (sources.length > 0) {
|
||||
for (const source of sources) {
|
||||
await prisma.gitSource.update({
|
||||
where: { id: source.id },
|
||||
data: { teams: { connect: { id: '0' } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
const destinations = await prisma.destinationDocker.findMany({
|
||||
where: { teams: { every: { id: team.id } } }
|
||||
});
|
||||
if (destinations.length > 0) {
|
||||
for (const destination of destinations) {
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id: destination.id },
|
||||
data: { teams: { connect: { id: '0' } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
await prisma.teamInvitation.deleteMany({ where: { teamId: team.id } });
|
||||
await prisma.permission.deleteMany({ where: { teamId: team.id } });
|
||||
// await prisma.user.delete({ where: { id } });
|
||||
await prisma.team.delete({ where: { id: team.id } });
|
||||
}
|
||||
}
|
||||
|
||||
const notAloneInTeams = await prisma.team.findMany({ where: { users: { some: { id: userId } } } });
|
||||
if (notAloneInTeams.length > 0) {
|
||||
for (const team of notAloneInTeams) {
|
||||
await prisma.team.update({
|
||||
where: { id: team.id },
|
||||
data: { users: { disconnect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function newTeam(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const userId = request.user?.userId;
|
||||
const name = uniqueName();
|
||||
const { id } = await prisma.team.create({
|
||||
data: {
|
||||
name,
|
||||
permissions: { create: { user: { connect: { id: userId } }, permission: 'owner' } },
|
||||
users: { connect: { id: userId } }
|
||||
}
|
||||
});
|
||||
return reply.code(201).send({ id })
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getTeam(request: FastifyRequest, reply: FastifyReply) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
|
||||
const user = await prisma.user.findFirst({
|
||||
where: { id: userId, teams: teamId === '0' ? undefined : { some: { id } } },
|
||||
include: { permission: true }
|
||||
});
|
||||
if (!user) return reply.code(401).send()
|
||||
|
||||
const permissions = await prisma.permission.findMany({
|
||||
where: { teamId: id },
|
||||
include: { user: { select: { id: true, email: true } } }
|
||||
});
|
||||
const team = await prisma.team.findUnique({ where: { id }, include: { permissions: true } });
|
||||
const invitations = await prisma.teamInvitation.findMany({ where: { teamId: team.id } });
|
||||
return {
|
||||
team,
|
||||
permissions,
|
||||
invitations
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveTeam(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params;
		const { name } = request.body;

		await prisma.team.update({ where: { id }, data: { name: { set: name } } });
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

// export async function deleteUser(request: FastifyRequest, reply: FastifyReply) {
// 	try {
// 		const userId = request.user.userId;
// 		const { id } = request.params;
//
// 		const aloneInTeams = await prisma.team.findMany({ where: { users: { every: { id: userId } }, id } });
// 		if (aloneInTeams.length > 0) {
// 			for (const team of aloneInTeams) {
// 				const applications = await prisma.application.findMany({
// 					where: { teams: { every: { id: team.id } } }
// 				});
// 				if (applications.length > 0) {
// 					for (const application of applications) {
// 						await prisma.application.update({
// 							where: { id: application.id },
// 							data: { teams: { connect: { id: '0' } } }
// 						});
// 					}
// 				}
// 				const services = await prisma.service.findMany({
// 					where: { teams: { every: { id: team.id } } }
// 				});
// 				if (services.length > 0) {
// 					for (const service of services) {
// 						await prisma.service.update({
// 							where: { id: service.id },
// 							data: { teams: { connect: { id: '0' } } }
// 						});
// 					}
// 				}
// 				const databases = await prisma.database.findMany({
// 					where: { teams: { every: { id: team.id } } }
// 				});
// 				if (databases.length > 0) {
// 					for (const database of databases) {
// 						await prisma.database.update({
// 							where: { id: database.id },
// 							data: { teams: { connect: { id: '0' } } }
// 						});
// 					}
// 				}
// 				const sources = await prisma.gitSource.findMany({
// 					where: { teams: { every: { id: team.id } } }
// 				});
// 				if (sources.length > 0) {
// 					for (const source of sources) {
// 						await prisma.gitSource.update({
// 							where: { id: source.id },
// 							data: { teams: { connect: { id: '0' } } }
// 						});
// 					}
// 				}
// 				const destinations = await prisma.destinationDocker.findMany({
// 					where: { teams: { every: { id: team.id } } }
// 				});
// 				if (destinations.length > 0) {
// 					for (const destination of destinations) {
// 						await prisma.destinationDocker.update({
// 							where: { id: destination.id },
// 							data: { teams: { connect: { id: '0' } } }
// 						});
// 					}
// 				}
// 				await prisma.teamInvitation.deleteMany({ where: { teamId: team.id } });
// 				await prisma.permission.deleteMany({ where: { teamId: team.id } });
// 				await prisma.user.delete({ where: { id: userId } });
// 				await prisma.team.delete({ where: { id: team.id } });
//
// 			}
// 		}
//
// 		const notAloneInTeams = await prisma.team.findMany({ where: { users: { some: { id: userId } } } });
// 		if (notAloneInTeams.length > 0) {
// 			for (const team of notAloneInTeams) {
// 				await prisma.team.update({
// 					where: { id: team.id },
// 					data: { users: { disconnect: { id } } }
// 				});
// 			}
// 		}
//
// 		return reply.code(201).send()
// 	} catch (error) {
// 		console.log(error)
// 		throw { status: 500, message: error }
// 	}
// }

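/**
 * Invites an existing user (looked up by email) to a team. Self-invites, users who
 * are already members, and re-invites while a pending invitation is less than a day
 * old are rejected; an expired invitation is deleted and recreated. Replies 201 with
 * a confirmation message.
 *
 * Illustrative request body (field names taken from the destructuring below, values
 * are placeholders only):
 *   { "email": "user@example.com", "permission": "owner", "teamId": "<team id>", "teamName": "<team name>" }
 */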
export async function inviteToTeam(request: FastifyRequest, reply: FastifyReply) {
	try {
		const userId = request.user.userId;
		const { email, permission, teamId, teamName } = request.body;
		const userFound = await prisma.user.findUnique({ where: { email } });
		if (!userFound) {
			throw `No user found with '${email}' email address.`
		}
		const uid = userFound.id;
		if (uid === userId) {
			throw `Invitation to yourself? Whaaaaat?`
		}
		const alreadyInTeam = await prisma.team.findFirst({
			where: { id: teamId, users: { some: { id: uid } } }
		});
		if (alreadyInTeam) {
			throw {
				message: `Already in the team.`
			};
		}
		const invitationFound = await prisma.teamInvitation.findFirst({ where: { uid, teamId } });
		if (invitationFound) {
			if (day().toDate() < day(invitationFound.createdAt).add(1, 'day').toDate()) {
				throw 'Invitation already pending on user confirmation.'
			} else {
				await prisma.teamInvitation.delete({ where: { id: invitationFound.id } });
				await prisma.teamInvitation.create({
					data: { email, uid, teamId, teamName, permission }
				});
				return reply.code(201).send({ message: 'Invitation sent.' })
			}
		} else {
			await prisma.teamInvitation.create({
				data: { email, uid, teamId, teamName, permission }
			});
			return reply.code(201).send({ message: 'Invitation sent.' })
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

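/**
 * Accepts a pending invitation for the authenticated user: connects the user to the
 * invited team, creates the matching permission record, then deletes the invitation
 * whose id is supplied in the request body. Note that the invitation is looked up by
 * user id only, so the code effectively assumes at most one pending invitation per user.
 */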
export async function acceptInvitation(request: FastifyRequest) {
	try {
		const userId = request.user.userId;
		const { id } = request.body;
		const invitation = await prisma.teamInvitation.findFirst({
			where: { uid: userId },
			rejectOnNotFound: true
		});
		await prisma.team.update({
			where: { id: invitation.teamId },
			data: { users: { connect: { id: userId } } }
		});
		await prisma.permission.create({
			data: {
				user: { connect: { id: userId } },
				permission: invitation.permission,
				team: { connect: { id: invitation.teamId } }
			}
		});
		await prisma.teamInvitation.delete({ where: { id } });
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

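/**
 * Revokes (deletes) a pending team invitation by the id supplied in the request body.
 */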
export async function revokeInvitation(request: FastifyRequest) {
	try {
		const { id } = request.body
		await prisma.teamInvitation.delete({ where: { id } });
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

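/**
 * Removes a user entirely: deletes their permission records, disconnects them from
 * every team they belong to, and, if a team is left without members, reassigns that
 * team's applications, databases, services, GitHub/GitLab apps, git sources and
 * Docker destinations to the root team ('0') before deleting the now-empty team.
 * Finally deletes the user record and replies 201.
 */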
export async function removeUser(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { uid } = request.body;
		const user = await prisma.user.findUnique({ where: { id: uid }, include: { teams: true, permission: true } });
		if (user) {
			const permissions = user.permission;
			if (permissions.length > 0) {
				for (const permission of permissions) {
					await prisma.permission.deleteMany({ where: { id: permission.id, userId: uid } });
				}
			}
			const teams = user.teams;
			if (teams.length > 0) {
				for (const team of teams) {
					const newTeam = await prisma.team.update({
						where: { id: team.id },
						data: { users: { disconnect: { id: uid } } },
						include: { applications: true, database: true, gitHubApps: true, gitLabApps: true, gitSources: true, destinationDocker: true, service: true, users: true }
					});
					if (newTeam.users.length === 0) {
						if (newTeam.applications.length > 0) {
							for (const application of newTeam.applications) {
								await prisma.application.update({
									where: { id: application.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						if (newTeam.database.length > 0) {
							for (const database of newTeam.database) {
								await prisma.database.update({
									where: { id: database.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						if (newTeam.service.length > 0) {
							for (const service of newTeam.service) {
								await prisma.service.update({
									where: { id: service.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						if (newTeam.gitHubApps.length > 0) {
							for (const gitHubApp of newTeam.gitHubApps) {
								await prisma.githubApp.update({
									where: { id: gitHubApp.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						if (newTeam.gitLabApps.length > 0) {
							for (const gitLabApp of newTeam.gitLabApps) {
								await prisma.gitlabApp.update({
									where: { id: gitLabApp.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						if (newTeam.gitSources.length > 0) {
							for (const gitSource of newTeam.gitSources) {
								await prisma.gitSource.update({
									where: { id: gitSource.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						if (newTeam.destinationDocker.length > 0) {
							for (const destinationDocker of newTeam.destinationDocker) {
								await prisma.destinationDocker.update({
									where: { id: destinationDocker.id },
									data: { teams: { disconnect: { id: team.id }, connect: { id: '0' } } }
								});
							}
						}
						await prisma.team.delete({ where: { id: team.id } });
					}
				}
			}
		}
		await prisma.user.delete({ where: { id: uid } });
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

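/**
 * Updates an existing permission record, identified by permissionId and userId from
 * the request body, to the supplied newPermission value. Replies 201 on success.
 */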
export async function setPermission(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { userId, newPermission, permissionId } = request.body;
		await prisma.permission.updateMany({
			where: { id: permissionId, userId },
			data: { permission: { set: newPermission } }
		});
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}

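/**
 * Flags a user's password for reset by setting it to the sentinel value 'RESETME';
 * the id of the affected user is taken from the request body. Replies 201 on success.
 */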
export async function changePassword(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.body;
		// Update the user identified by the id from the request body;
		// the previous `where: { id: undefined }` would never match and throws in Prisma.
		await prisma.user.update({ where: { id }, data: { password: 'RESETME' } });
		return reply.code(201).send()
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}