mirror of
https://github.com/ershisan99/coolify.git
synced 2025-12-18 12:33:06 +00:00
Compare commits
1035 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c3bb81f84d | ||
|
|
68a117a61d | ||
|
|
d624f95cb9 | ||
|
|
bedbe46830 | ||
|
|
7846bf1bc3 | ||
|
|
c28bc786ae | ||
|
|
60a53bb812 | ||
|
|
017ba61dc9 | ||
|
|
b5d82dc87b | ||
|
|
866070b9ea | ||
|
|
49ecb15773 | ||
|
|
047df77195 | ||
|
|
0904d1ff9c | ||
|
|
7bf6041d8c | ||
|
|
90597389c9 | ||
|
|
039350d762 | ||
|
|
ce31146a9c | ||
|
|
344dd7db28 | ||
|
|
b735ca2da7 | ||
|
|
cbdd098528 | ||
|
|
d5f9d57be9 | ||
|
|
d5f2d22663 | ||
|
|
9914686ed7 | ||
|
|
ee9acfe556 | ||
|
|
9f9d4297ca | ||
|
|
002637ec5f | ||
|
|
c1238c6594 | ||
|
|
c43b848708 | ||
|
|
e22950cecb | ||
|
|
5a7edcb762 | ||
|
|
9b47de71fc | ||
|
|
8f9462245a | ||
|
|
f0ed51cd22 | ||
|
|
99a7eff6ab | ||
|
|
edfed57df3 | ||
|
|
a70e35cb79 | ||
|
|
3da1b31363 | ||
|
|
59bc2dd8a7 | ||
|
|
7b6e7680a6 | ||
|
|
1c65df282e | ||
|
|
77ae070c98 | ||
|
|
20708f1456 | ||
|
|
9a1a67a4ef | ||
|
|
3a8e5df897 | ||
|
|
88a62be30c | ||
|
|
c478c1b7ad | ||
|
|
18c2b2e38e | ||
|
|
b105e6fbf8 | ||
|
|
da11bae67c | ||
|
|
d344a9bb4f | ||
|
|
222adb212b | ||
|
|
cb01bbe4ac | ||
|
|
c63237684a | ||
|
|
792d51d93f | ||
|
|
62bfb5dacc | ||
|
|
d7fa80703d | ||
|
|
52b712d90b | ||
|
|
331e13b7cb | ||
|
|
64bb4a2525 | ||
|
|
31d7e7e806 | ||
|
|
e740788d6c | ||
|
|
928d53e532 | ||
|
|
87ba4560ad | ||
|
|
9137e8bc32 | ||
|
|
35bd2b23d5 | ||
|
|
10a514d9ac | ||
|
|
71096acdff | ||
|
|
07da696397 | ||
|
|
41baf150c2 | ||
|
|
f0a52b2ef4 | ||
|
|
54e83fdff1 | ||
|
|
46327ff2fc | ||
|
|
8b26acc841 | ||
|
|
b90cb5a731 | ||
|
|
cd9b642c5e | ||
|
|
41a928d41b | ||
|
|
1388bee62c | ||
|
|
8ebc778d40 | ||
|
|
3a59091b41 | ||
|
|
e764c4651c | ||
|
|
10f04d2177 | ||
|
|
119f994b50 | ||
|
|
e39541c318 | ||
|
|
cf88885c94 | ||
|
|
1192346ce3 | ||
|
|
0e3bd85847 | ||
|
|
edeb6c6965 | ||
|
|
138fd5cb6d | ||
|
|
155410bd44 | ||
|
|
20bd829c2e | ||
|
|
7b7e222946 | ||
|
|
98d901d06c | ||
|
|
4e862cda6f | ||
|
|
4e940807ae | ||
|
|
b081743f54 | ||
|
|
34bb9f301f | ||
|
|
ed8a6daeea | ||
|
|
9e81ab43ac | ||
|
|
32d94cbe97 | ||
|
|
46a83aa457 | ||
|
|
08d7593ca9 | ||
|
|
a50f7a7cc2 | ||
|
|
2c33447f9f | ||
|
|
d67a3f51ec | ||
|
|
2719974262 | ||
|
|
eb5aebd58d | ||
|
|
98dbf3d8a5 | ||
|
|
d9489a2cb4 | ||
|
|
95832d34f7 | ||
|
|
d3e9aea63d | ||
|
|
d6972e2ed1 | ||
|
|
50844e98be | ||
|
|
5c6fcfebf9 | ||
|
|
84cfe6fb42 | ||
|
|
abf0aeb2a8 | ||
|
|
a7aca0ce8b | ||
|
|
67bb5d973b | ||
|
|
662948d622 | ||
|
|
f5bedfdf7f | ||
|
|
db9db61d92 | ||
|
|
d255cb1973 | ||
|
|
6529271de2 | ||
|
|
0dd32b5319 | ||
|
|
b032da798b | ||
|
|
a1a9f1531e | ||
|
|
f71b54deb2 | ||
|
|
c63430e342 | ||
|
|
6821b128ad | ||
|
|
3f8d44a01c | ||
|
|
3aef04437c | ||
|
|
53e32c038b | ||
|
|
1660510614 | ||
|
|
69f5601b3e | ||
|
|
6e22fecc98 | ||
|
|
d18b2b6a1f | ||
|
|
4b0370ac08 | ||
|
|
750ef80777 | ||
|
|
59c62923be | ||
|
|
68b220d06e | ||
|
|
250ea64203 | ||
|
|
0ab57396d2 | ||
|
|
1e36856e65 | ||
|
|
cfdc8db543 | ||
|
|
1f25bc411f | ||
|
|
972f77c790 | ||
|
|
795f99bb47 | ||
|
|
54f7142b2b | ||
|
|
26eacfc2c0 | ||
|
|
e2bf02841f | ||
|
|
6a59b8d27c | ||
|
|
7fc43ef2bb | ||
|
|
70a3fc247e | ||
|
|
56ab8312f1 | ||
|
|
6fb6a514ac | ||
|
|
b01f5f47b3 | ||
|
|
ebdd3601b3 | ||
|
|
c0d711170b | ||
|
|
da86f0076b | ||
|
|
9d8551a9be | ||
|
|
01ea86479d | ||
|
|
eb62888c39 | ||
|
|
b006fe8f68 | ||
|
|
dc3add495c | ||
|
|
59086e9eb4 | ||
|
|
e563988596 | ||
|
|
5a206a140c | ||
|
|
dbf910ff38 | ||
|
|
35b31dce2b | ||
|
|
1ec620be4b | ||
|
|
8516ac671a | ||
|
|
3b7fdebe8c | ||
|
|
17ac3048ac | ||
|
|
4e43efef50 | ||
|
|
4f4f5b1c01 | ||
|
|
1fa5c5e021 | ||
|
|
436e0e3a2b | ||
|
|
e717c1d599 | ||
|
|
ae5d90eb47 | ||
|
|
c095cb58b3 | ||
|
|
6bba37c36d | ||
|
|
60a428a952 | ||
|
|
c376123877 | ||
|
|
cd3663038f | ||
|
|
16b7c1708b | ||
|
|
3435f92fcb | ||
|
|
cef571b8cc | ||
|
|
242bc61e2d | ||
|
|
c917135bd3 | ||
|
|
3802158ad5 | ||
|
|
e452f68614 | ||
|
|
9586213dd1 | ||
|
|
30781f218c | ||
|
|
697c42ff66 | ||
|
|
37d8f1847c | ||
|
|
2af13fff55 | ||
|
|
51e8ca8de0 | ||
|
|
06228cd2a7 | ||
|
|
0033baafdc | ||
|
|
79dfc6a660 | ||
|
|
972b0fa811 | ||
|
|
ad51a9ebc8 | ||
|
|
51a40d049d | ||
|
|
8b3113bd92 | ||
|
|
d6b6938555 | ||
|
|
ce52608f19 | ||
|
|
ede37d296b | ||
|
|
6374b1284b | ||
|
|
6ac8dd8907 | ||
|
|
24c655d7ef | ||
|
|
1f087cc29a | ||
|
|
c3684a1650 | ||
|
|
a410fd0776 | ||
|
|
271fb1358d | ||
|
|
a4d53a28eb | ||
|
|
e69e32f6c7 | ||
|
|
650409dde3 | ||
|
|
f3f4bb5105 | ||
|
|
9c02af6b52 | ||
|
|
6a3f4ba171 | ||
|
|
6a6426fe6b | ||
|
|
21256746c3 | ||
|
|
c34d643f95 | ||
|
|
0be402af82 | ||
|
|
b5b0b6524d | ||
|
|
22f1a3c908 | ||
|
|
fa5f439858 | ||
|
|
7cc760eecf | ||
|
|
af0652f6b2 | ||
|
|
9e009bebaa | ||
|
|
8e53ae3484 | ||
|
|
7ceb8f1537 | ||
|
|
b0eae8cfe9 | ||
|
|
febef372b8 | ||
|
|
a18e3659aa | ||
|
|
e2e342851a | ||
|
|
bee3292088 | ||
|
|
f56d4dbbb3 | ||
|
|
eccd7c96d7 | ||
|
|
4046c472ed | ||
|
|
0da4a1024a | ||
|
|
aa2f328640 | ||
|
|
4d22b610b6 | ||
|
|
e91c3eab9c | ||
|
|
2e8fd6f0c7 | ||
|
|
90fde24b40 | ||
|
|
02a1f50776 | ||
|
|
57b97a9204 | ||
|
|
1ec03693d3 | ||
|
|
4246d86694 | ||
|
|
2cce1f8459 | ||
|
|
3937cfec53 | ||
|
|
259aeeb67a | ||
|
|
9d53bc0926 | ||
|
|
1211f3c9fd | ||
|
|
c07d6aa702 | ||
|
|
4f662dbf21 | ||
|
|
a4301c5d23 | ||
|
|
86b7824c78 | ||
|
|
435f063c36 | ||
|
|
902a764ff2 | ||
|
|
4097378847 | ||
|
|
5f3567e808 | ||
|
|
7325353ced | ||
|
|
68f5b32876 | ||
|
|
8d4eaad920 | ||
|
|
4b38865cc9 | ||
|
|
030cb124e5 | ||
|
|
fd363ec017 | ||
|
|
8b813fb07a | ||
|
|
326f0dac1b | ||
|
|
828faaf2b1 | ||
|
|
9582664406 | ||
|
|
ec5474b72b | ||
|
|
62d1011d9f | ||
|
|
0a7ec6bd20 | ||
|
|
b84c37cd8f | ||
|
|
887d65e512 | ||
|
|
3543a9c809 | ||
|
|
40da3ff9fe | ||
|
|
2315192f4b | ||
|
|
0faa1540f4 | ||
|
|
00cab67e73 | ||
|
|
b92bc9eebb | ||
|
|
1905db16e8 | ||
|
|
3e9cf7285b | ||
|
|
6fdbc572fe | ||
|
|
f94e17134e | ||
|
|
3fd50ebb12 | ||
|
|
40cbee0d75 | ||
|
|
0eb7f4526e | ||
|
|
646d92757a | ||
|
|
51efa01b11 | ||
|
|
dc4a63ef92 | ||
|
|
1b717ac091 | ||
|
|
e93d97f2bc | ||
|
|
45c904e876 | ||
|
|
880865f1f2 | ||
|
|
8e42203b89 | ||
|
|
2bd91fa970 | ||
|
|
a3fd95020d | ||
|
|
e5b1ce4eef | ||
|
|
531973baab | ||
|
|
b6e6a1ccf1 | ||
|
|
1140afe2c9 | ||
|
|
f8f17832de | ||
|
|
caaf030517 | ||
|
|
106aee31bd | ||
|
|
c98ed5338a | ||
|
|
48fa4ff245 | ||
|
|
d75d2880e5 | ||
|
|
ec907b0ce4 | ||
|
|
2cda0b22c2 | ||
|
|
a0076db42e | ||
|
|
a37cf49c2a | ||
|
|
c4833c3cc2 | ||
|
|
d03fbd9224 | ||
|
|
5998212b82 | ||
|
|
62ccab22d6 | ||
|
|
5ccea1cfcc | ||
|
|
8ccb1bd34c | ||
|
|
c1a48dcf1e | ||
|
|
11d74c0c1f | ||
|
|
8290ee856f | ||
|
|
08332c8321 | ||
|
|
046f738b7d | ||
|
|
07708155ac | ||
|
|
df5e23c7c2 | ||
|
|
41adc02801 | ||
|
|
72b650b086 | ||
|
|
06fe3f33c0 | ||
|
|
cbabf7fc51 | ||
|
|
6aeafda604 | ||
|
|
30d656698e | ||
|
|
94d1af01df | ||
|
|
af97d399b6 | ||
|
|
2f90fd1fe6 | ||
|
|
c05a140b0b | ||
|
|
cbfb9a3844 | ||
|
|
5a227f70c6 | ||
|
|
44a102443d | ||
|
|
cf7fdf198d | ||
|
|
68f2f4f978 | ||
|
|
029b623f08 | ||
|
|
fe3702847a | ||
|
|
e9b852a30e | ||
|
|
1d4e5df5a2 | ||
|
|
5e14b72fe4 | ||
|
|
8ebff72cde | ||
|
|
e16643c48c | ||
|
|
65c8f55ee6 | ||
|
|
fbc81ab3eb | ||
|
|
a4d56fd79a | ||
|
|
ce45cb8aca | ||
|
|
7f8428cd17 | ||
|
|
14d79031c1 | ||
|
|
b8aa7b6d08 | ||
|
|
397ca7f20e | ||
|
|
e10b76a46b | ||
|
|
b46566280d | ||
|
|
3ab6a231eb | ||
|
|
2bc2ae9b6e | ||
|
|
2b28f8bd8f | ||
|
|
dcdac29135 | ||
|
|
591ee29e0d | ||
|
|
625e71ab08 | ||
|
|
b0af54587b | ||
|
|
be3080df08 | ||
|
|
04685c9f9d | ||
|
|
1a83f2635f | ||
|
|
630aa45c87 | ||
|
|
0c3a381d1f | ||
|
|
ffac7c5c87 | ||
|
|
410800e81c | ||
|
|
9481beb61f | ||
|
|
141f2481a7 | ||
|
|
ea18f25adc | ||
|
|
9018184747 | ||
|
|
4fc2dd55f5 | ||
|
|
5ef9a282eb | ||
|
|
93a6518974 | ||
|
|
07aa285b27 | ||
|
|
bf01e9e29f | ||
|
|
d70672ba4b | ||
|
|
5eeb519ed6 | ||
|
|
5f047e4adf | ||
|
|
56b9a376bd | ||
|
|
0a1d31a188 | ||
|
|
64c9fb9a1b | ||
|
|
47aad15cd5 | ||
|
|
260a47a366 | ||
|
|
fd4bbe17f0 | ||
|
|
25ff637703 | ||
|
|
f571453696 | ||
|
|
5cd7533972 | ||
|
|
3a252509d0 | ||
|
|
2bd3802a6f | ||
|
|
ce2757f514 | ||
|
|
8419cdf604 | ||
|
|
907c2414ae | ||
|
|
f82207564f | ||
|
|
991a09838c | ||
|
|
25df4bfd85 | ||
|
|
d2f89d001b | ||
|
|
1971f227fd | ||
|
|
c1adffe260 | ||
|
|
e725887a55 | ||
|
|
5bf79b75b0 | ||
|
|
6926975e40 | ||
|
|
978a01c968 | ||
|
|
f421f5ee84 | ||
|
|
383831c7b8 | ||
|
|
41329facf7 | ||
|
|
7d3c644148 | ||
|
|
7fab9b5930 | ||
|
|
58763ef84c | ||
|
|
0e6abf172b | ||
|
|
9e681ece41 | ||
|
|
28f87a306d | ||
|
|
23e8833208 | ||
|
|
03962663c2 | ||
|
|
cc2ec55c4d | ||
|
|
ff2c38aa16 | ||
|
|
b5a9a2cea8 | ||
|
|
cd3f661f7e | ||
|
|
41bf6b5b86 | ||
|
|
a4e7c85184 | ||
|
|
19aca9ab35 | ||
|
|
08704c289a | ||
|
|
2224c22c6e | ||
|
|
b281889acd | ||
|
|
cfc50a27b0 | ||
|
|
ed5f21da6a | ||
|
|
78f3eb81dd | ||
|
|
6a833934ce | ||
|
|
45bf6f77d1 | ||
|
|
a1b3b7b687 | ||
|
|
7ebcad6abb | ||
|
|
fed6d2bf07 | ||
|
|
bea4943e9f | ||
|
|
1979e431b8 | ||
|
|
9bead1d6b4 | ||
|
|
56c4295e16 | ||
|
|
7c7b5a61e5 | ||
|
|
abaa13fda8 | ||
|
|
042bfeddbb | ||
|
|
f45ab067ce | ||
|
|
97a6f04aaa | ||
|
|
27f1e1d7cd | ||
|
|
417c01d6e0 | ||
|
|
b2e7435d0f | ||
|
|
73c9cb1d51 | ||
|
|
41c5dd3b53 | ||
|
|
bb0c93dc2f | ||
|
|
7953c1df30 | ||
|
|
c3f4245164 | ||
|
|
369001febb | ||
|
|
7ec296be6b | ||
|
|
d2f5a58f3b | ||
|
|
f4315144af | ||
|
|
e92775887d | ||
|
|
a5f1b4b675 | ||
|
|
8f3f9ebade | ||
|
|
157e5fd7aa | ||
|
|
5e7e1c11c7 | ||
|
|
e8516bc831 | ||
|
|
e3f78a1cf9 | ||
|
|
3449e0f8fc | ||
|
|
66af12f9b5 | ||
|
|
13acf09dcc | ||
|
|
ce71dccbc1 | ||
|
|
d9ba1a0b5c | ||
|
|
0b709c93a8 | ||
|
|
1657e5a151 | ||
|
|
a165b21950 | ||
|
|
0d0715a340 | ||
|
|
1bd33fea98 | ||
|
|
76754ded79 | ||
|
|
4da27a46a2 | ||
|
|
039953588e | ||
|
|
b8b4f559db | ||
|
|
2b0df270df | ||
|
|
b96c1a23ec | ||
|
|
f779b3bb54 | ||
|
|
6462982d12 | ||
|
|
84b4cc5d54 | ||
|
|
1bd2ccbc16 | ||
|
|
3abe1610bf | ||
|
|
61716738ed | ||
|
|
4e819f6eba | ||
|
|
fedb38f2bc | ||
|
|
aae108032c | ||
|
|
020013683b | ||
|
|
70de2538e2 | ||
|
|
9f581c82a9 | ||
|
|
eb2e07afc5 | ||
|
|
9c47b8495c | ||
|
|
2f8d0ee60c | ||
|
|
5bf14f4639 | ||
|
|
9da08d600b | ||
|
|
4d47eab07c | ||
|
|
f2061c5c25 | ||
|
|
430fc66ed7 | ||
|
|
bcb84b8126 | ||
|
|
dd83e86bc3 | ||
|
|
3e8a8364dc | ||
|
|
be41c0dd02 | ||
|
|
a17b7a564e | ||
|
|
f3cdda29bc | ||
|
|
de37ee9f1c | ||
|
|
8212868b92 | ||
|
|
b44d8578d9 | ||
|
|
0358cf2de2 | ||
|
|
94da008a47 | ||
|
|
456b1b8074 | ||
|
|
78e6a7d1d3 | ||
|
|
76dc7ffb68 | ||
|
|
211aff7170 | ||
|
|
bcacefb841 | ||
|
|
4505ad37d8 | ||
|
|
18cf57f33c | ||
|
|
9f2f5b40c3 | ||
|
|
8a401f50cb | ||
|
|
51a5b3b602 | ||
|
|
68f9bca054 | ||
|
|
e9e92c6e9e | ||
|
|
008cfdba09 | ||
|
|
9973197fa5 | ||
|
|
ec3b94cf96 | ||
|
|
c4cb92c78d | ||
|
|
c390f82246 | ||
|
|
b4f98e24a1 | ||
|
|
e042c5cfde | ||
|
|
faeae8fd6c | ||
|
|
ae4942ba29 | ||
|
|
fd652bfce6 | ||
|
|
3d72167721 | ||
|
|
ba284bef9e | ||
|
|
d18bb9cc74 | ||
|
|
a7ed3e58db | ||
|
|
8405ebd28d | ||
|
|
352bb65125 | ||
|
|
fe2cc5a99a | ||
|
|
7a2f29f6a3 | ||
|
|
9a05bfa899 | ||
|
|
39fa64e20d | ||
|
|
3a835b420e | ||
|
|
82f7633c3a | ||
|
|
9fdac2741a | ||
|
|
8fb5260809 | ||
|
|
e08ec12d26 | ||
|
|
1202e00a21 | ||
|
|
4ba2205af4 | ||
|
|
09841ad4cb | ||
|
|
d2dcd0abc8 | ||
|
|
fe9d0503fb | ||
|
|
8e9e6607e5 | ||
|
|
e1efd9355f | ||
|
|
ca705bbf89 | ||
|
|
b70fe09d17 | ||
|
|
d7d570393f | ||
|
|
41ca265e5a | ||
|
|
03cde08d67 | ||
|
|
5684674bd7 | ||
|
|
4fe919f2ea | ||
|
|
c8c23c53ef | ||
|
|
b1c25e98d7 | ||
|
|
7ab5a4bfcf | ||
|
|
a3ee57995c | ||
|
|
32020fd336 | ||
|
|
f1313b6468 | ||
|
|
3ef093c7e6 | ||
|
|
f5dfaa81d3 | ||
|
|
fcf206a081 | ||
|
|
9790d2b613 | ||
|
|
c39cb42601 | ||
|
|
0ead17ab70 | ||
|
|
4a6062522e | ||
|
|
201fa82efc | ||
|
|
d28433ee64 | ||
|
|
cc348bf0f5 | ||
|
|
b023d65fcf | ||
|
|
bd15d85732 | ||
|
|
b4bbd22781 | ||
|
|
d4c972584a | ||
|
|
edef4bd4a0 | ||
|
|
448611039c | ||
|
|
305fab488e | ||
|
|
38f0546f05 | ||
|
|
8cb679711d | ||
|
|
4d11867500 | ||
|
|
8232a7468b | ||
|
|
03e7af12be | ||
|
|
39f2e28a11 | ||
|
|
53947d805b | ||
|
|
15f8e44237 | ||
|
|
5ce1bc1ec5 | ||
|
|
c36bd34a1a | ||
|
|
781d034484 | ||
|
|
e4f701b148 | ||
|
|
5160d0780e | ||
|
|
8cd561b8cc | ||
|
|
b8b57bc48b | ||
|
|
58406f055e | ||
|
|
b049297082 | ||
|
|
a284928352 | ||
|
|
fe787538e3 | ||
|
|
360fb5ea37 | ||
|
|
45af5cbef8 | ||
|
|
463dacbe59 | ||
|
|
13891110ce | ||
|
|
01e0fb70c9 | ||
|
|
c1c25d59c8 | ||
|
|
6ac54e17f4 | ||
|
|
a82805846f | ||
|
|
a53bda1436 | ||
|
|
6309074844 | ||
|
|
b80e0d15fb | ||
|
|
7a0d151467 | ||
|
|
c55505af6c | ||
|
|
a788b7bc13 | ||
|
|
5f27fc0770 | ||
|
|
b814c6e563 | ||
|
|
8f58b14629 | ||
|
|
269250ef3d | ||
|
|
a3241516cb | ||
|
|
943300509b | ||
|
|
92d1f5aa55 | ||
|
|
7a74ba1796 | ||
|
|
614eb923d8 | ||
|
|
066f5b25e0 | ||
|
|
18f7ab1b95 | ||
|
|
78293340cc | ||
|
|
c47457a17f | ||
|
|
d00629b627 | ||
|
|
ddfbda6f80 | ||
|
|
d910b21185 | ||
|
|
b60b832426 | ||
|
|
adfc976f41 | ||
|
|
1b43976ff0 | ||
|
|
321fb019eb | ||
|
|
f6858a68e0 | ||
|
|
741db1778b | ||
|
|
809f40dec9 | ||
|
|
f3b5de4697 | ||
|
|
fe17e2eaba | ||
|
|
22ef0b5d29 | ||
|
|
823279fb60 | ||
|
|
19f661706d | ||
|
|
986c5b7133 | ||
|
|
4e334d4fff | ||
|
|
dcf7f92aab | ||
|
|
f56361c0ca | ||
|
|
4946ca2d91 | ||
|
|
f6a91cb53c | ||
|
|
726fbbb52a | ||
|
|
29d2278579 | ||
|
|
72ceeff022 | ||
|
|
54d65ec011 | ||
|
|
96aef5c4a6 | ||
|
|
7b64166fb0 | ||
|
|
1f5908e0b8 | ||
|
|
a4562d18b6 | ||
|
|
875e232199 | ||
|
|
80f95a4674 | ||
|
|
17d56aa972 | ||
|
|
f4ba60cf8f | ||
|
|
0b8a648f13 | ||
|
|
2576a3af2c | ||
|
|
2e6c73fa3c | ||
|
|
b8d8ee4560 | ||
|
|
d9b74ada84 | ||
|
|
01b058151b | ||
|
|
989d952f35 | ||
|
|
908af3e024 | ||
|
|
819157fda1 | ||
|
|
5a4458e93f | ||
|
|
1fbd403f34 | ||
|
|
098e519c55 | ||
|
|
3ef4a242f9 | ||
|
|
ad3044dce1 | ||
|
|
e40541d831 | ||
|
|
2786e7dbaf | ||
|
|
196d681a63 | ||
|
|
d2353e3c35 | ||
|
|
2475031f88 | ||
|
|
cd15e68adc | ||
|
|
27431f779d | ||
|
|
b9b5a2faeb | ||
|
|
e471b11d3b | ||
|
|
a742a3d2e3 | ||
|
|
c615f6c07e | ||
|
|
a6ebfb08f7 | ||
|
|
2b0d162226 | ||
|
|
2c5f09a8bb | ||
|
|
ef073e586b | ||
|
|
82bfdb87e3 | ||
|
|
767e7b80cb | ||
|
|
8d26ea9063 | ||
|
|
1a7c4310d0 | ||
|
|
4e8fe79e2b | ||
|
|
a8c5551292 | ||
|
|
2bf73109b2 | ||
|
|
f0ab3750bd | ||
|
|
58a11e37fe | ||
|
|
927bf46304 | ||
|
|
6b89857697 | ||
|
|
b72e5ccef6 | ||
|
|
6617b7811b | ||
|
|
e1c1988db4 | ||
|
|
af99ea4678 | ||
|
|
a6d5316090 | ||
|
|
f5e7a84fa6 | ||
|
|
c013764b61 | ||
|
|
2320ab0dfc | ||
|
|
1281a0f7e4 | ||
|
|
d8350cd4ee | ||
|
|
e3b7c23ed9 | ||
|
|
eae1ea21d6 | ||
|
|
541aa76b64 | ||
|
|
7b8555d524 | ||
|
|
fdf998c181 | ||
|
|
3d6b343adc | ||
|
|
e338cecc14 | ||
|
|
e5537a33fb | ||
|
|
35384deb68 | ||
|
|
547ca60c2a | ||
|
|
376f6f7455 | ||
|
|
abe92dedff | ||
|
|
4b521ceedc | ||
|
|
6dfcb9e52b | ||
|
|
335e3216e2 | ||
|
|
5b22bb4818 | ||
|
|
0097004882 | ||
|
|
1bc9e4c2d3 | ||
|
|
36c7e1a3c3 | ||
|
|
c6b4d04e26 | ||
|
|
fa6cf068c7 | ||
|
|
7c273a3a48 | ||
|
|
3de2ea1523 | ||
|
|
c5c9f84503 | ||
|
|
16ea9a3e07 | ||
|
|
48f952c798 | ||
|
|
f78ea5de07 | ||
|
|
5adbd5e784 | ||
|
|
5b2afa79d7 | ||
|
|
dc4e6d02b7 | ||
|
|
8ae61c8f78 | ||
|
|
684b8e0914 | ||
|
|
7c3314abae | ||
|
|
ab9f8ff356 | ||
|
|
892d8cd5c1 | ||
|
|
8b8b45778d | ||
|
|
6655fb182c | ||
|
|
0926d40247 | ||
|
|
ddc4d36688 | ||
|
|
53e1f22eb1 | ||
|
|
3d2a34737b | ||
|
|
ebde77008c | ||
|
|
3d27fd04ba | ||
|
|
d9fcaf3473 | ||
|
|
d266f761aa | ||
|
|
1d01405412 | ||
|
|
7c62eb5bd6 | ||
|
|
4dcc76d366 | ||
|
|
d2fad19a11 | ||
|
|
7c92c4c964 | ||
|
|
5a71d33236 | ||
|
|
1b4db4f793 | ||
|
|
c084b22815 | ||
|
|
acacef95cd | ||
|
|
5d722183d3 | ||
|
|
ac19ea5407 | ||
|
|
d19b05b970 | ||
|
|
a0795136ac | ||
|
|
d2566e345a | ||
|
|
66cd7cf90e | ||
|
|
9a599981ef | ||
|
|
f51f7bc82a | ||
|
|
dbcbac0137 | ||
|
|
e722f8a87c | ||
|
|
61679749eb | ||
|
|
23e12c9c44 | ||
|
|
6da78cd3e5 | ||
|
|
78ce8100a3 | ||
|
|
76ba338b45 | ||
|
|
823fe2deb2 | ||
|
|
cb90f692f2 | ||
|
|
0325343ede | ||
|
|
69d1556a1d | ||
|
|
2daa043840 | ||
|
|
f340ca9d05 | ||
|
|
02abd038fa | ||
|
|
b9da68ec28 | ||
|
|
88b3910d80 | ||
|
|
160412f6e4 | ||
|
|
59a86b25fc | ||
|
|
49e58b39f5 | ||
|
|
58e0757bbd | ||
|
|
5ff4197572 | ||
|
|
b56e28d27a | ||
|
|
c3d39e1dd4 | ||
|
|
716aa36bfd | ||
|
|
f01460170e | ||
|
|
a414ce282d | ||
|
|
6c32f3b130 | ||
|
|
4cf907c572 | ||
|
|
b28baaa5aa | ||
|
|
980dea64e0 | ||
|
|
c340f6436f | ||
|
|
54376fd105 | ||
|
|
ef006578b2 | ||
|
|
b0b1ee0c60 | ||
|
|
4e2026aa2d | ||
|
|
e0e50b4bd5 | ||
|
|
c9b52f1310 | ||
|
|
0195213dfb | ||
|
|
d6225cbde3 | ||
|
|
7b4c194b97 | ||
|
|
a5ecff24a3 | ||
|
|
c9c003dc9b | ||
|
|
fd95936219 | ||
|
|
15a3fd4456 | ||
|
|
df896542e4 | ||
|
|
8927e81274 | ||
|
|
340f061827 | ||
|
|
15cbac97c2 | ||
|
|
bb32d0f7d1 | ||
|
|
c370fba9ba | ||
|
|
6e32421172 | ||
|
|
6643687c0a | ||
|
|
ed01e78d77 | ||
|
|
93aed52f88 | ||
|
|
bb6d1fd6a3 | ||
|
|
6e33179fc2 | ||
|
|
277fd167cf | ||
|
|
98e8d5170b | ||
|
|
11ee1651ae | ||
|
|
0dfcf9b1e6 | ||
|
|
08f57ac5bc | ||
|
|
7095e781e9 | ||
|
|
df18b93809 | ||
|
|
0c2e028b38 | ||
|
|
80cb1bc129 | ||
|
|
74c1cb51f6 | ||
|
|
2e864bddf9 | ||
|
|
e60ae91b5d | ||
|
|
d606cd86a0 | ||
|
|
bc463c37f4 | ||
|
|
76c1480903 | ||
|
|
6f312caf8b | ||
|
|
980d8d374f | ||
|
|
c49b34942f | ||
|
|
fcfa8717a5 | ||
|
|
954a265965 | ||
|
|
69845a020a | ||
|
|
22200fd8a7 | ||
|
|
add441675d | ||
|
|
d3d9754277 | ||
|
|
aa5e2edbc5 | ||
|
|
310b099ecf | ||
|
|
1cfaef911c | ||
|
|
b931c5f638 | ||
|
|
7c683668eb | ||
|
|
cab7ac7d58 | ||
|
|
15e69c538a | ||
|
|
31ee938b66 | ||
|
|
e51a8d43d9 | ||
|
|
64cd5b6e4b | ||
|
|
6c9ef34905 | ||
|
|
aa89019236 | ||
|
|
df58fcee16 | ||
|
|
ea3ffc429f | ||
|
|
2efca7a2b5 | ||
|
|
9db448a5e2 | ||
|
|
feee90beef | ||
|
|
906a63b6b5 | ||
|
|
2ce64ac213 | ||
|
|
4d8bf57135 | ||
|
|
c5348ce4b3 | ||
|
|
7f87c03f97 | ||
|
|
9469f148ff | ||
|
|
ffb7dc4ec2 | ||
|
|
242b8fa746 | ||
|
|
50cae5ac3b | ||
|
|
6a71233eb2 | ||
|
|
1aff8933c9 | ||
|
|
0ed87a5dfc | ||
|
|
24a6bcbd1e | ||
|
|
ca7f3da19d | ||
|
|
bf047e2a3c | ||
|
|
4454287be9 | ||
|
|
3bd2183655 | ||
|
|
1f7080e8f8 | ||
|
|
8b20761e8b | ||
|
|
655d0b5d5f | ||
|
|
91849cdd3a | ||
|
|
df25a694c3 | ||
|
|
eabaca145e | ||
|
|
2f0e458765 | ||
|
|
ff8037f231 | ||
|
|
a116028e1b | ||
|
|
e606a02b29 | ||
|
|
531c712ea5 | ||
|
|
3ae7624361 | ||
|
|
fed83462fa | ||
|
|
58c9f937c5 | ||
|
|
5d14b9209d | ||
|
|
305a95fa74 | ||
|
|
b29c1e702a | ||
|
|
b04d75ab08 | ||
|
|
25abfaadb9 | ||
|
|
1df81b8698 | ||
|
|
4487846fd7 | ||
|
|
86918f5160 | ||
|
|
bc723b3f15 | ||
|
|
1881e646d4 | ||
|
|
aa98808a1a | ||
|
|
f9a2232703 | ||
|
|
19d6be8663 | ||
|
|
0eb7c890ad | ||
|
|
7bfa68aa58 | ||
|
|
857a38050e | ||
|
|
c5b7f92caf | ||
|
|
df31ffd7fb | ||
|
|
0df0322d36 | ||
|
|
260552322d | ||
|
|
88ef6496a2 | ||
|
|
bdf123bf7b | ||
|
|
8fc3760eef | ||
|
|
5656f6f709 | ||
|
|
53e7e8b77e | ||
|
|
b990915b7a | ||
|
|
15b7822ffd | ||
|
|
cfa28419cb | ||
|
|
30ef0d2a3a | ||
|
|
755f99200a | ||
|
|
7af79ed3a2 | ||
|
|
2971e14269 | ||
|
|
01954aaf30 | ||
|
|
da018a8f2a | ||
|
|
77400bbbb0 | ||
|
|
3c3333d3df | ||
|
|
4963bd4144 | ||
|
|
b4a418dded | ||
|
|
a724b0daee | ||
|
|
88aa620cb4 | ||
|
|
70d3448110 | ||
|
|
09a1a406a6 | ||
|
|
40939d0b7f | ||
|
|
aec1d184c8 | ||
|
|
69d3cb5dd8 | ||
|
|
3deff162bb | ||
|
|
4ed54568d3 | ||
|
|
004724da55 | ||
|
|
97e9b5ffe3 | ||
|
|
45f920f802 | ||
|
|
2b31532d19 | ||
|
|
e7a6ecf95b | ||
|
|
545c98cee0 | ||
|
|
d29ccbfe37 | ||
|
|
d0807862e6 | ||
|
|
b92616dc14 | ||
|
|
a1a436300d | ||
|
|
16a5aeb1ba | ||
|
|
872095ff7a | ||
|
|
d88f2ea4c3 | ||
|
|
02e0385ab8 | ||
|
|
c9751d4cd9 | ||
|
|
162b637992 | ||
|
|
a10ddd4063 | ||
|
|
f46ccc63a7 | ||
|
|
fc04a45744 | ||
|
|
90c2b59a51 | ||
|
|
d6bee99c1b | ||
|
|
0871d47568 | ||
|
|
5c646c1898 | ||
|
|
8974de165f | ||
|
|
e622294b87 | ||
|
|
cf9d32b556 | ||
|
|
e2d6b5bf64 | ||
|
|
dec58fd6d1 | ||
|
|
dbb2241213 | ||
|
|
3bd8ac5820 | ||
|
|
f514aa676d | ||
|
|
73fc9755dd | ||
|
|
5089c843b6 | ||
|
|
cd527f2bce | ||
|
|
82de234f21 | ||
|
|
ae6f325c0a | ||
|
|
c64bbbe426 | ||
|
|
eafd882a06 | ||
|
|
460ae85226 | ||
|
|
a64b095c13 | ||
|
|
7ea0de3fb8 | ||
|
|
b4c836afbd | ||
|
|
2d0f22b379 | ||
|
|
a8e9668c2b | ||
|
|
425feba0e2 | ||
|
|
c09b8d888f | ||
|
|
748e691a58 | ||
|
|
f8c81ff95f | ||
|
|
d11c4a3cd7 | ||
|
|
3f3ea151ef | ||
|
|
7e2f68870c | ||
|
|
df41cf14da | ||
|
|
111370c025 | ||
|
|
bcb2ba0b1b | ||
|
|
807d526ffa | ||
|
|
2ff9c5fed5 | ||
|
|
d43cd663d2 | ||
|
|
dae91267e8 | ||
|
|
b2d6317a23 | ||
|
|
c49b412e69 | ||
|
|
05e5d73556 | ||
|
|
53620f4b1a | ||
|
|
9d14b03eb1 | ||
|
|
04a5b1bd4f | ||
|
|
31b3f58b2c | ||
|
|
9c173d1de0 | ||
|
|
e11b6d74ed | ||
|
|
c7efe899fa | ||
|
|
adcd68c1ab | ||
|
|
23a4ebb74a | ||
|
|
cccb9a5fec | ||
|
|
b416e3ab3e | ||
|
|
e16b7d65d4 | ||
|
|
3744c64459 | ||
|
|
f742c2a3e2 | ||
|
|
142b83cc13 | ||
|
|
bad84289c4 | ||
|
|
166a573392 | ||
|
|
3585e365e7 | ||
|
|
5114ac7721 | ||
|
|
703d941f23 | ||
|
|
c691c52751 |
16
.devcontainer/Dockerfile
Normal file
16
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node/.devcontainer/base.Dockerfile
|
||||
|
||||
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
|
||||
ARG VARIANT="16-bullseye"
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}
|
||||
|
||||
# [Optional] Uncomment this section to install additional OS packages.
|
||||
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||
|
||||
# [Optional] Uncomment if you want to install an additional version of node using nvm
|
||||
# ARG EXTRA_NODE_VERSION=10
|
||||
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
|
||||
|
||||
# [Optional] Uncomment if you want to install more global node modules
|
||||
RUN su node -c "npm install -g pnpm"
|
||||
33
.devcontainer/devcontainer.json
Normal file
33
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,33 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node
|
||||
{
|
||||
"name": "Node.js",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
// Update 'VARIANT' to pick a Node version: 18, 16, 14.
|
||||
// Append -bullseye or -buster to pin to an OS version.
|
||||
// Use -bullseye variants on local arm64/Apple Silicon.
|
||||
"args": {
|
||||
"VARIANT": "16-bullseye"
|
||||
}
|
||||
},
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {},
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"dbaeumer.vscode-eslint",
|
||||
"svelte.svelte-vscode",
|
||||
"ardenivanov.svelte-intellisense",
|
||||
"Prisma.prisma"
|
||||
],
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
"forwardPorts": [3000],
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": "cp .env.template .env && pnpm install && pnpm db:push && pnpm db:seed",
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "node",
|
||||
"features": {
|
||||
"docker-in-docker": "20.10",
|
||||
"github-cli": "latest"
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,12 @@
|
||||
.DS_Store
|
||||
node_modules
|
||||
build
|
||||
.svelte-kit
|
||||
package
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
dist
|
||||
.routify
|
||||
client
|
||||
apps/api/db/*.db
|
||||
local-serve
|
||||
@@ -1,35 +0,0 @@
|
||||
####################################
|
||||
# Domain where your Coolify instance will be available and reachable.
|
||||
# It's the same as you set in Github OAuth App and Github App as <domain>.
|
||||
DOMAIN=
|
||||
## Let's Encrypt contact email required
|
||||
EMAIL=
|
||||
|
||||
# JWT Token Sign Key for logging you in to Coolify's frontend
|
||||
JWT_SIGN_KEY=
|
||||
# Encryption key for SECRETS - do NOT share it with others!
|
||||
SECRETS_ENCRYPTION_KEY=
|
||||
|
||||
# Docker Engine
|
||||
DOCKER_ENGINE=/var/run/docker.sock
|
||||
# Docker network to use internally between the proxy and your apps
|
||||
DOCKER_NETWORK=coollabs
|
||||
|
||||
# Mongodb
|
||||
# Values in case if you are using our Mongodb installation - CHANGE user and password fields!
|
||||
MONGODB_HOST=coollabs-mongodb
|
||||
MONGODB_PORT=27017
|
||||
MONGODB_USER=supercooldbuser
|
||||
MONGODB_PASSWORD=developmentPassword4db
|
||||
MONGODB_DB=coolLabs-prod
|
||||
|
||||
# Frontend only variables
|
||||
VITE_GITHUB_APP_CLIENTID=
|
||||
VITE_GITHUB_APP_NAME=
|
||||
|
||||
# Github OAuth & App secrets and private key - you can get it from Github.
|
||||
GITHUB_APP_CLIENT_SECRET=
|
||||
GITHUP_APP_WEBHOOK_SECRET=
|
||||
|
||||
# It should look like this. Newlines breaks with \n
|
||||
GITHUB_APP_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----\nMIIEowIBAAKCAQEA7Y+Uwkd8FINSwFktWGdtwCaOAazTDYR8ucEzGyR9r+ooJZhF\nOc32qgDSps6Q5DsqPOzvfhiviqU+et9VF+bJhfdzwJ+Le86QZH1RgsDMoY049XvI\nKSwP........"
|
||||
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
open_collective: coollabsio
|
||||
47
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
Normal file
47
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
name: 🐞 Bug report
|
||||
description: Create a bug report to help us improve coolify
|
||||
title: "[Bug]: "
|
||||
labels: [Bug]
|
||||
assignees:
|
||||
- andrasbacsai
|
||||
- vasani-arpit
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for taking the time to fill out this bug report! Please fill the form in English
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Is there an existing issue for this?
|
||||
options:
|
||||
- label: I have searched the existing issues
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Description
|
||||
description: A concise description of what you're experiencing and what you expect.
|
||||
placeholder: |
|
||||
When I do <X>, <Y> happens and I see the error message attached below:
|
||||
```...```
|
||||
What I expect is <Z>
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Steps To Reproduce
|
||||
description: Add steps to reproduce this behaviour, include console / network logs & videos
|
||||
placeholder: |
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: "The version of your coolify Instance"
|
||||
placeholder: "2.5.2"
|
||||
validations:
|
||||
required: true
|
||||
31
.github/ISSUE_TEMPLATE/--feature-request.yaml
vendored
Normal file
31
.github/ISSUE_TEMPLATE/--feature-request.yaml
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
name: 🛠️ Feature request
|
||||
description: Suggest an idea to improve coolify
|
||||
title: '[Feature]: '
|
||||
labels: [Enhancement]
|
||||
assignees:
|
||||
- andrasbacsai
|
||||
- vasani-arpit
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for taking the time to request a feature for coolify! Please also add your request here to get feedback from the community: https://feedback.coolify.io/!
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Is there an existing issue for this?
|
||||
description: Please search to see if an issue related to this feature request already exists.
|
||||
options:
|
||||
- label: I have searched the existing issues
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
description: One paragraph description of the feature.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Why should this be worked on?
|
||||
description: A concise description of the problems or use cases for this feature request.
|
||||
validations:
|
||||
required: true
|
||||
20
.github/ISSUE_TEMPLATE/--task.yaml
vendored
Normal file
20
.github/ISSUE_TEMPLATE/--task.yaml
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
name: 📝 Task
|
||||
description: Create a task for the team to work on
|
||||
title: "[Task]: "
|
||||
labels: [Task]
|
||||
body:
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Is there an existing issue for this?
|
||||
description: Please search to see if an issue related to this already exists.
|
||||
options:
|
||||
- label: I have searched the existing issues
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: SubTasks
|
||||
placeholder: |
|
||||
- Sub Task 1
|
||||
- Sub Task 2
|
||||
validations:
|
||||
required: false
|
||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: 🤔 Questions and Help
|
||||
url: https://discord.com/invite/6rDM4fkymF
|
||||
about: Reach out to us on discord or our github discussions page.
|
||||
- name: 🙋♂️ service request
|
||||
url: https://feedback.coolify.io/
|
||||
about: want to request a new service? for e.g wordpress, hasura, appwrite etc...
|
||||
37
.github/workflows/production-release.yml
vendored
Normal file
37
.github/workflows/production-release.yml
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
name: production-release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
making-something-cool:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always()
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_CHANNEL }}
|
||||
35
.github/workflows/staging-release.yml
vendored
Normal file
35
.github/workflows/staging-release.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
name: staging-release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- next
|
||||
|
||||
jobs:
|
||||
staging-release:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
tags: coollabsio/coolify:next
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next,mode=max
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always()
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_CHANNEL }}
|
||||
20
.gitignore
vendored
20
.gitignore
vendored
@@ -1,11 +1,13 @@
|
||||
.vscode
|
||||
.idea
|
||||
.DS_Store
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
.routify
|
||||
build
|
||||
.svelte-kit
|
||||
package
|
||||
.env
|
||||
yarn-error.log
|
||||
api/development/console.log
|
||||
.pnpm-debug.log
|
||||
yarn.lock
|
||||
.env.*
|
||||
!.env.example
|
||||
dist
|
||||
client
|
||||
apps/api/db/*.db
|
||||
local-serve
|
||||
apps/api/db/migration.db-journal
|
||||
11
.gitpod.yml
Normal file
11
.gitpod.yml
Normal file
@@ -0,0 +1,11 @@
|
||||
# This configuration file was automatically generated by Gitpod.
|
||||
# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file)
|
||||
# and commit this file to your remote git repository to share the goodness with others.
|
||||
image: gitpod/workspace-node:2022-06-20-19-54-55
|
||||
tasks:
|
||||
- init: pnpm install && pnpm db:push && pnpm db:seed
|
||||
command: pnpm dev
|
||||
|
||||
ports:
|
||||
- port: 3001
|
||||
visibility: public
|
||||
14
.prettierrc
14
.prettierrc
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"arrowParens": "avoid",
|
||||
"bracketSpacing": true,
|
||||
"printWidth": 80,
|
||||
"semi": true,
|
||||
"singleQuote": false,
|
||||
"tabWidth": 2,
|
||||
"trailingComma": "all",
|
||||
"svelteSortOrder" : "styles-scripts-markup",
|
||||
"svelteStrictMode": true,
|
||||
"svelteBracketNewLine": true,
|
||||
"svelteAllowShorthand": true,
|
||||
"plugins": ["prettier-plugin-svelte"]
|
||||
}
|
||||
11
.vscode/settings.json
vendored
Normal file
11
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"i18n-ally.localesPaths": ["src/lib/locales"],
|
||||
"i18n-ally.keystyle": "nested",
|
||||
"i18n-ally.extract.ignoredByFiles": {
|
||||
"src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
|
||||
},
|
||||
"i18n-ally.sourceLanguage": "en",
|
||||
"i18n-ally.enabledFrameworks": ["svelte"],
|
||||
"i18n-ally.enabledParsers": ["js", "ts", "json"],
|
||||
"i18n-ally.extract.autoDetect": true
|
||||
}
|
||||
281
CONTRIBUTING.md
Normal file
281
CONTRIBUTING.md
Normal file
@@ -0,0 +1,281 @@
|
||||
# 👋 Welcome
|
||||
|
||||
First of all, thank you for considering contributing to my project! It means a lot 💜.
|
||||
|
||||
Contribution guide is for v2, not applicable for v3
|
||||
|
||||
## 🙋 Want to help?
|
||||
|
||||
If you begin in GitHub contribution, you can find the [first contribution](https://github.com/firstcontributions/first-contributions) and follow this guide.
|
||||
|
||||
Follow the [introduction](#introduction) to get started then start contributing!
|
||||
|
||||
This is a little list of what you can do to help the project:
|
||||
|
||||
- [🧑💻 Develop your own ideas](#developer-contribution)
|
||||
- [🌐 Translate the project](#translation)
|
||||
|
||||
## 👋 Introduction
|
||||
|
||||
### Setup with github codespaces
|
||||
|
||||
If you have github codespaces enabled then you can just create a codespace and run `pnpm dev` to run your the dev environment. All the required dependencies and packages has been configured for you already.
|
||||
|
||||
### Setup locally in your machine
|
||||
|
||||
> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. 💡 Although windows users can use github codespaces for development
|
||||
|
||||
#### Recommended Pull Request Guideline
|
||||
|
||||
- Fork the project
|
||||
- Clone your fork repo to local
|
||||
- Create a new branch
|
||||
- Push to your fork repo
|
||||
- Create a pull request: https://github.com/coollabsio/compare
|
||||
- Write a proper description
|
||||
- Open the pull request to review against `next` branch
|
||||
|
||||
---
|
||||
|
||||
# How to start after you set up your local fork?
|
||||
|
||||
Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
|
||||
|
||||
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
||||
|
||||
#### Steps for local setup
|
||||
|
||||
1. Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
|
||||
2. Install dependencies with `pnpm install`.
|
||||
3. Need to create a local SQlite database with `pnpm db:push`.
|
||||
|
||||
This will apply all migrations at `db/dev.db`.
|
||||
|
||||
4. Seed the database with base entities with `pnpm db:seed`
|
||||
5. You can start coding after starting `pnpm dev`.
|
||||
|
||||
## 🧑💻 Developer contribution
|
||||
|
||||
### Technical skills required
|
||||
|
||||
- **Languages**: Node.js / Javascript / Typescript
|
||||
- **Framework JS/TS**: Svelte / SvelteKit
|
||||
- **Database ORM**: Prisma.io
|
||||
- **Docker Engine**
|
||||
|
||||
### Database migrations
|
||||
|
||||
During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.
|
||||
|
||||
If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>` where `nameOfMigration` is given by you. Make it sense. :)
|
||||
|
||||
### Tricky parts
|
||||
|
||||
- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!
|
||||
|
||||
---
|
||||
|
||||
# How to add new services
|
||||
|
||||
You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:
|
||||
|
||||
- Self-hostable (obviously)
|
||||
- Open-source
|
||||
- Maintained (I do not want to add software full of bugs)
|
||||
|
||||
## Backend
|
||||
|
||||
There are 5 steps you should make on the backend side.
|
||||
|
||||
1. Create Prisma / database schema for the new service.
|
||||
2. Add supported versions of the service.
|
||||
3. Update global functions.
|
||||
4. Create API endpoints.
|
||||
5. Define automatically generated variables.
|
||||
|
||||
> I will use [Umami](https://umami.is/) as an example service.
|
||||
|
||||
### Create Prisma / database schema for the new service.
|
||||
|
||||
You only need to do this if you store passwords or any persistent configuration. Mostly it is required by all services, but there are some exceptions, like NocoDB.
|
||||
|
||||
Update Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).
|
||||
|
||||
- Add new model with the new service name.
|
||||
- Make a relationshup with `Service` model.
|
||||
- In the `Service` model, the name of the new field should be with low-capital.
|
||||
- If the service needs a database, define a `publicPort` field to be able to make it's database public, example field name in case of PostgreSQL: `postgresqlPublicPort`. It should be a optional field.
|
||||
|
||||
If you are finished with the Prisma schema, you should update the database schema with `pnpm db:push` command.
|
||||
|
||||
> You must restart the running development environment to be able to use the new model
|
||||
|
||||
> If you use VSCode, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running VSCode.
|
||||
|
||||
### Add supported versions
|
||||
|
||||
Supported versions are hardcoded into Coolify (for now).
|
||||
|
||||
You need to update `supportedServiceTypesAndVersions` function at [src/lib/components/common.ts](src/lib/components/common.ts). Example JSON:
|
||||
|
||||
```js
|
||||
{
|
||||
// Name used to identify the service internally
|
||||
name: 'umami',
|
||||
// Fancier name to show to the user
|
||||
fancyName: 'Umami',
|
||||
// Docker base image for the service
|
||||
baseImage: 'ghcr.io/mikecao/umami',
|
||||
// Optional: If there is any dependent image, you should list it here
|
||||
images: [],
|
||||
// Usable tags
|
||||
versions: ['postgresql-latest'],
|
||||
// Which tag is the recommended
|
||||
recommendedVersion: 'postgresql-latest',
|
||||
// Application's default port, Umami listens on 3000
|
||||
ports: {
|
||||
main: 3000
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Update global functions
|
||||
|
||||
1. Add the new service to the `include` variable in [src/lib/database/services.ts](src/lib/database/services.ts), so it will be included in all places in the database queries where it is required.
|
||||
|
||||
```js
|
||||
const include: Prisma.ServiceInclude = {
|
||||
destinationDocker: true,
|
||||
persistentStorage: true,
|
||||
serviceSecret: true,
|
||||
minio: true,
|
||||
plausibleAnalytics: true,
|
||||
vscodeserver: true,
|
||||
wordpress: true,
|
||||
ghost: true,
|
||||
meiliSearch: true,
|
||||
umami: true // This line!
|
||||
};
|
||||
```
|
||||
|
||||
2. Update the database update query with the new service type to `configureServiceType` function in [src/lib/database/services.ts](src/lib/database/services.ts). This function defines the automatically generated variables (passwords, users, etc.) and it's encryption process (if applicable).
|
||||
|
||||
```js
|
||||
[...]
|
||||
else if (type === 'umami') {
|
||||
const postgresqlUser = cuid();
|
||||
const postgresqlPassword = encrypt(generatePassword());
|
||||
const postgresqlDatabase = 'umami';
|
||||
const hashSalt = encrypt(generatePassword(64));
|
||||
await prisma.service.update({
|
||||
where: { id },
|
||||
data: {
|
||||
type,
|
||||
umami: {
|
||||
create: {
|
||||
postgresqlDatabase,
|
||||
postgresqlPassword,
|
||||
postgresqlUser,
|
||||
hashSalt,
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
3. Add decryption process for configurations and passwords to `getService` function in [src/lib/database/services.ts](src/lib/database/services.ts)
|
||||
|
||||
```js
|
||||
if (body.umami?.postgresqlPassword)
|
||||
body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);
|
||||
|
||||
if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
|
||||
```
|
||||
|
||||
4. Add service deletion query to `removeService` function in [src/lib/database/services.ts](src/lib/database/services.ts)
|
||||
|
||||
### Create API endpoints.
|
||||
|
||||
You need to add a new folder under [src/routes/services/[id]](src/routes/services/[id]) with the low-capital name of the service. You need 3 default files in that folder.
|
||||
|
||||
#### `index.json.ts`:
|
||||
|
||||
It has a POST endpoint that updates the service details in Coolify's database, such as name, url, other configurations, like passwords. It should look something like this:
|
||||
|
||||
```js
|
||||
import { getUserDetails } from '$lib/common';
|
||||
import * as db from '$lib/database';
|
||||
import { ErrorHandler } from '$lib/database';
|
||||
import type { RequestHandler } from '@sveltejs/kit';
|
||||
|
||||
export const post: RequestHandler = async (event) => {
|
||||
const { status, body } = await getUserDetails(event);
|
||||
if (status === 401) return { status, body };
|
||||
|
||||
const { id } = event.params;
|
||||
|
||||
let { name, fqdn } = await event.request.json();
|
||||
if (fqdn) fqdn = fqdn.toLowerCase();
|
||||
|
||||
try {
|
||||
await db.updateService({ id, fqdn, name });
|
||||
return { status: 201 };
|
||||
} catch (error) {
|
||||
return ErrorHandler(error);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
If it's necessary, you can create your own database update function, specifically for the new service.
|
||||
|
||||
#### `start.json.ts`
|
||||
|
||||
It has a POST endpoint that sets all the required secrets, persistent volumes, `docker-compose.yaml` file and sends a request to the specified docker engine.
|
||||
|
||||
You could also define an `HTTP` or `TCP` proxy for every other port that should be proxied to your server. (See `startHttpProxy` and `startTcpProxy` functions in [src/lib/haproxy/index.ts](src/lib/haproxy/index.ts))
|
||||
|
||||
#### `stop.json.ts`
|
||||
|
||||
It has a POST endpoint that stops the service and all dependent (TCP/HTTP proxies) containers. If publicPort is specified it also needs to cleanup it from the database.
|
||||
|
||||
## Frontend
|
||||
|
||||
1. You need to add a custom logo at [src/lib/components/svg/services/](src/lib/components/svg/services/) as a svelte component.
|
||||
|
||||
SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
|
||||
|
||||
2. You need to include it the logo at
|
||||
|
||||
- [src/routes/services/index.svelte](src/routes/services/index.svelte) with `isAbsolute` in two places,
|
||||
- [src/lib/components/ServiceLinks.svelte](src/lib/components/ServiceLinks.svelte) with `isAbsolute` and a link to the docs/main site of the service
|
||||
- [src/routes/services/[id]/configuration/type.svelte](src/routes/services/[id]/configuration/type.svelte) with `isAbsolute`.
|
||||
|
||||
3. By default the URL and the name frontend forms are included in [src/routes/services/[id]/\_Services/\_Services.svelte](src/routes/services/[id]/_Services/_Services.svelte).
|
||||
|
||||
If you need to show more details on the frontend, such as users/passwords, you need to add Svelte component to [src/routes/services/[id]/\_Services](src/routes/services/[id]/_Services) with an underscore. For example, see other files in that folder.
|
||||
|
||||
You also need to add the new inputs to the `index.json.ts` file of the specific service, like for MinIO here: [src/routes/services/[id]/minio/index.json.ts](src/routes/services/[id]/minio/index.json.ts)
|
||||
|
||||
## 🌐 Translate the project
|
||||
|
||||
The project use [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) to translate the project.
|
||||
It follows the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) to name languages.
|
||||
|
||||
### Installation
|
||||
|
||||
You must have gone throw all the [intro](#introduction) steps before you can start translating.
|
||||
|
||||
It's only an advice, but I recommend you to use:
|
||||
|
||||
- Visual Studio Code
|
||||
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal to see the progress of the translation.
|
||||
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get the syntax color for the project
|
||||
|
||||
### Adding a language
|
||||
|
||||
If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the step below:
|
||||
|
||||
1. In `src/lib/locales/`, Copy paste `en.json` and rename it with your language (eg: `cz.json`).
|
||||
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO of your language": "Language",` (eg: `"cz": "Czech",`).
|
||||
3. Have fun translating!
|
||||
43
Dockerfile
Normal file
43
Dockerfile
Normal file
@@ -0,0 +1,43 @@
|
||||
FROM node:18-alpine as build
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --no-cache curl
|
||||
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
|
||||
|
||||
COPY . .
|
||||
RUN pnpm install
|
||||
RUN pnpm build
|
||||
|
||||
# Production build
|
||||
FROM node:18-alpine
|
||||
WORKDIR /app
|
||||
ENV NODE_ENV production
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
|
||||
PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
|
||||
PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
|
||||
PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
|
||||
PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
|
||||
PRISMA_CLIENT_ENGINE_TYPE=binary
|
||||
|
||||
COPY --from=coollabsio/prisma-engine:3.15 /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/
|
||||
|
||||
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
|
||||
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
|
||||
|
||||
RUN mkdir -p ~/.docker/cli-plugins/
|
||||
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
|
||||
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
|
||||
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
|
||||
|
||||
COPY --from=build /app/apps/api/build/ .
|
||||
COPY --from=build /app/apps/ui/build/ ./public
|
||||
COPY --from=build /app/apps/api/prisma/ ./prisma
|
||||
COPY --from=build /app/apps/api/package.json .
|
||||
COPY --from=build /app/docker-compose.yaml .
|
||||
|
||||
RUN pnpm install -p
|
||||
|
||||
EXPOSE 3000
|
||||
CMD pnpm start
|
||||
148
README.md
148
README.md
@@ -1,93 +1,117 @@
|
||||
# About
|
||||
# Coolify
|
||||
|
||||
https://andrasbacsai.com/farewell-netlify-and-heroku-after-3-days-of-coding
|
||||
An open-source & self-hostable Heroku / Netlify alternative.
|
||||
|
||||
# Features
|
||||
- Deploy your Node.js and static sites just by pushing code to git.
|
||||
- Hassle-free installation and upgrade process.
|
||||
- One-click MongoDB, MySQL, PostgreSQL, CouchDB deployments!
|
||||
## Live Demo
|
||||
|
||||
# Upcoming features
|
||||
- Backups & monitoring.
|
||||
- User analytics with privacy in mind.
|
||||
- And much more (see [Roadmap](https://github.com/coollabsio/coolify/projects/1)).
|
||||
https://demo.coolify.io/
|
||||
|
||||
(If it is unresponsive, that means someone overloaded the server. 😄)
|
||||
|
||||
# FAQ
|
||||
Q: What is a buildpack?
|
||||
## Feedback
|
||||
|
||||
A: It defines your application's final form.
|
||||
`Static` means that it will be hosted as a static site.
|
||||
`NodeJs` means that it will be started as a node application.
|
||||
If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!
|
||||
|
||||
# Screenshots
|
||||
## How to install
|
||||
|
||||
[Login](https://coollabs.io/coolify/login.jpg)
|
||||
Installation is automated with the following command:
|
||||
|
||||
[Applications](https://coollabs.io/coolify/applications.jpg)
|
||||
```bash
|
||||
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
|
||||
```
|
||||
|
||||
[Databases](https://coollabs.io/coolify/databases.jpg)
|
||||
If you would like no questions during installation:
|
||||
|
||||
[Configuration](https://coollabs.io/coolify/configuration.jpg)
|
||||
```bash
|
||||
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
|
||||
```
|
||||
|
||||
[Settings](https://coollabs.io/coolify/settings.jpg)
|
||||
For more details goto the [docs](https://docs.coollabs.io/coolify/installation).
|
||||
|
||||
[Logs](https://coollabs.io/coolify/logs.jpg)
|
||||
## Features
|
||||
|
||||
# Getting Started
|
||||
ARM support is in beta!
|
||||
|
||||
Automatically: `sh <(curl -fsSL https://get.coollabs.io/install.sh) coolify`
|
||||
### Git Sources
|
||||
|
||||
Manually:
|
||||
### Requirements before installation
|
||||
- [Docker](https://docs.docker.com/engine/install/) version 20+
|
||||
- Docker in [swarm mode enabled](https://docs.docker.com/engine/reference/commandline/swarm_init/) (should be set manually before installation)
|
||||
- A [MongoDB](https://docs.mongodb.com/manual/installation/) instance.
|
||||
- We have a [simple installation](https://github.com/coollabsio/infrastructure/tree/main/mongo) if you need one
|
||||
- A configured DNS entry (see `.env.template`)
|
||||
- [Github App](https://docs.github.com/en/developers/apps/creating-a-github-app)
|
||||
You can use the following Git Sources to be auto-deployed to your Coolifyt instance! (Self-hosted versions are also supported.)
|
||||
|
||||
- GitHub App name: could be anything weird
|
||||
- Homepage URL: https://yourdomain
|
||||
- Github
|
||||
- GitLab
|
||||
- Bitbucket (WIP)
|
||||
|
||||
Identifying and authorizing users:
|
||||
- Callback URL: https://yourdomain/api/v1/login/github/app
|
||||
- Request user authorization (OAuth) during installation -> Check!
|
||||
### Destinations
|
||||
|
||||
Webhook:
|
||||
- Active -> Check!
|
||||
- Webhook URL: https://yourdomain/api/v1/webhooks/deploy
|
||||
- Webhook Secret: it should be super secret
|
||||
You can deploy your applications to the following destinations:
|
||||
|
||||
Repository permissions:
|
||||
- Contents: Read-only
|
||||
- Metadata: Read-only
|
||||
|
||||
User permissions:
|
||||
- Email: Read-only
|
||||
- Local Docker Engine
|
||||
- Remote Docker Engine (WIP)
|
||||
- Kubernetes (WIP)
|
||||
|
||||
Subscribe to events:
|
||||
- Push -> Check!
|
||||
### Applications
|
||||
|
||||
### Installation
|
||||
- Clone this repository: `git clone git@github.com:coollabsio/coolify.git`
|
||||
- Set `.env` (see `.env.template`)
|
||||
- Installation: `bash install.sh all`
|
||||
These are the predefined build packs, but with the Docker build pack, you can host anything that is hostable with a single Dockerfile.
|
||||
|
||||
## Manual updating process (You probably never need to do this!)
|
||||
### Update everything (proxy+coolify)
|
||||
- `bash install.sh all`
|
||||
- Static sites
|
||||
- NodeJS
|
||||
- VueJS
|
||||
- NuxtJS
|
||||
- NextJS
|
||||
- React/Preact
|
||||
- Gatsby
|
||||
- Svelte
|
||||
- PHP
|
||||
- Laravel
|
||||
- Rust
|
||||
- Docker
|
||||
- Python
|
||||
- Deno
|
||||
|
||||
### Update coolify only
|
||||
- `bash install.sh coolify`
|
||||
### Databases
|
||||
|
||||
### Update proxy only
|
||||
- `bash install.sh proxy`
|
||||
One-click database is ready to be used internally or shared over the internet:
|
||||
|
||||
- MongoDB
|
||||
- MariaDB
|
||||
- MySQL
|
||||
- PostgreSQL
|
||||
- CouchDB
|
||||
- Redis
|
||||
|
||||
### One-click services
|
||||
|
||||
You can host cool open-source services as well:
|
||||
|
||||
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
|
||||
- [Ghost](https://ghost.org)
|
||||
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
|
||||
- [NocoDB](https://nocodb.com)
|
||||
- [VSCode Server](https://github.com/cdr/code-server)
|
||||
- [MinIO](https://min.io)
|
||||
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
|
||||
- [LanguageTool](https://languagetool.org)
|
||||
- [n8n](https://n8n.io)
|
||||
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
|
||||
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
|
||||
- [Umami](https://github.com/mikecao/umami)
|
||||
- [Fider](https://fider.io)
|
||||
- [Hasura](https://hasura.io)
|
||||
|
||||
## Migration from v1
|
||||
|
||||
A fresh installation is necessary. v2 and v3 are not compatible with v1.
|
||||
|
||||
## Support
|
||||
|
||||
# Contact
|
||||
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
|
||||
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
|
||||
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
|
||||
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)
|
||||
|
||||
## Contribute
|
||||
|
||||
See [our contribution guide](./CONTRIBUTING.md).
|
||||
|
||||
## License
|
||||
|
||||
# License
|
||||
This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Please see the [LICENSE](/LICENSE) file in our repository for the full text.
|
||||
|
||||
27
api/app.js
27
api/app.js
@@ -1,27 +0,0 @@
module.exports = async function (fastify, opts) {
  // Private routes
  fastify.register(async function (server) {
    if (process.env.NODE_ENV === 'production') server.register(require('./plugins/authentication'))
    server.register(require('./routes/v1/upgrade'), { prefix: '/upgrade' })
    server.register(require('./routes/v1/settings'), { prefix: '/settings' })
    server.register(require('./routes/v1/dashboard'), { prefix: '/dashboard' })
    server.register(require('./routes/v1/config'), { prefix: '/config' })
    server.register(require('./routes/v1/application/remove'), { prefix: '/application/remove' })
    server.register(require('./routes/v1/application/logs'), { prefix: '/application/logs' })
    server.register(require('./routes/v1/application/check'), { prefix: '/application/check' })
    server.register(require('./routes/v1/application/deploy'), { prefix: '/application/deploy' })
    server.register(require('./routes/v1/application/deploy/logs'), { prefix: '/application/deploy/logs' })
    server.register(require('./routes/v1/databases'), { prefix: '/databases' })
  })
  // Public routes
  fastify.register(require('./routes/v1/verify'), { prefix: '/verify' })
  fastify.register(require('./routes/v1/login/github'), {
    prefix: '/login/github'
  })
  fastify.register(require('./routes/v1/webhooks/deploy'), {
    prefix: '/webhooks/deploy'
  })
  fastify.register(require('./routes/v1/undead'), {
    prefix: '/undead'
  })
}
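For context, a minimal sketch of how a route tree like the one above would typically be mounted in a Fastify entrypoint. The filename, port and `/api/v1` prefix are assumptions (the real entrypoint is not part of this diff), and the config plugin the routes rely on for `fastify.config` is omitted.

```js
// Hypothetical entrypoint (filename and options are assumptions, not shown in this diff).
const fastify = require('fastify')({ logger: true })

// Mount the route tree defined in api/app.js; the '/api/v1' prefix is an assumption
// based on the redirect URLs used elsewhere in the API. The fastify-env/config setup
// that provides fastify.config is omitted here.
fastify.register(require('./app'), { prefix: '/api/v1' })

fastify.listen(3000, '0.0.0.0', (err) => {
  if (err) {
    fastify.log.error(err)
    process.exit(1)
  }
})
```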
@@ -1,36 +0,0 @@
|
||||
const mongoose = require('mongoose')
|
||||
const { MongoMemoryServer } = require('mongodb-memory-server-core')
|
||||
|
||||
const mongoServer = new MongoMemoryServer({
|
||||
instance: {
|
||||
port: 27017,
|
||||
dbName: 'coolify',
|
||||
storageEngine: 'wiredTiger'
|
||||
},
|
||||
binary: {
|
||||
version: '4.4.3'
|
||||
|
||||
}
|
||||
})
|
||||
|
||||
mongoose.Promise = Promise
|
||||
mongoServer.getUri().then((mongoUri) => {
|
||||
const mongooseOpts = {
|
||||
useNewUrlParser: true,
|
||||
useUnifiedTopology: true
|
||||
}
|
||||
|
||||
mongoose.connect(mongoUri, mongooseOpts)
|
||||
|
||||
mongoose.connection.on('error', (e) => {
|
||||
if (e.message.code === 'ETIMEDOUT') {
|
||||
console.log(e)
|
||||
mongoose.connect(mongoUri, mongooseOpts)
|
||||
}
|
||||
console.log(e)
|
||||
})
|
||||
|
||||
mongoose.connection.once('open', () => {
|
||||
console.log(`Started in-memory mongodb ${mongoUri}`)
|
||||
})
|
||||
})
|
||||
@@ -1,34 +0,0 @@
|
||||
const packs = require('../../../packs')
|
||||
const { saveAppLog } = require('../../logging')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const deployId = configuration.general.deployId
|
||||
|
||||
const execute = packs[configuration.build.pack]
|
||||
if (execute) {
|
||||
try {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'inprogress' })
|
||||
await saveAppLog('### Building application.', configuration)
|
||||
|
||||
await execute(configuration)
|
||||
|
||||
await saveAppLog('### Building done.', configuration)
|
||||
} catch (error) {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
if (error.stack) throw { error: error.stack, type: 'server' }
|
||||
throw { error, type: 'app' }
|
||||
}
|
||||
} else {
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'failed' })
|
||||
throw { error: 'No buildpack found.', type: 'app' }
|
||||
}
|
||||
}
|
||||
@@ -1,42 +0,0 @@
const { docker } = require('../../docker')
const { execShellAsync } = require('../../common')
const Deployment = require('../../../models/Deployment')

async function purgeOldThings () {
  try {
    // TODO: Tweak this, because it deletes coolify-base, so the upgrade will be slow
    await docker.engine.pruneImages()
    await docker.engine.pruneContainers()
  } catch (error) {
    throw { error, type: 'server' }
  }
}

async function cleanup (configuration) {
  const { id } = configuration.repository
  const deployId = configuration.general.deployId
  try {
    // Mark stuck deployments of this repository as failed.
    const deployments = await Deployment.find({ repoId: id, deployId: { $ne: deployId }, progress: { $in: ['queued', 'inprogress'] } })
    for (const deployment of deployments) {
      await Deployment.findByIdAndUpdate(deployment._id, { $set: { progress: 'failed' } })
    }
  } catch (error) {
    throw { error, type: 'server' }
  }
}

async function deleteSameDeployments (configuration) {
  try {
    const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
    // Remove every stack that already runs this repository/branch before redeploying it.
    await Promise.all(services.map(async s => {
      const running = JSON.parse(s.Spec.Labels.configuration)
      if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
        await execShellAsync(`docker stack rm ${s.Spec.Labels['com.docker.stack.namespace']}`)
      }
    }))
  } catch (error) {
    throw { error, type: 'server' }
  }
}

module.exports = { cleanup, deleteSameDeployments, purgeOldThings }
@@ -1,88 +0,0 @@
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const cuid = require('cuid')
|
||||
const crypto = require('crypto')
|
||||
|
||||
const { execShellAsync } = require('../common')
|
||||
|
||||
function getUniq () {
|
||||
return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
|
||||
}
|
||||
|
||||
function setDefaultConfiguration (configuration) {
|
||||
try {
|
||||
const nickname = getUniq()
|
||||
const deployId = cuid()
|
||||
|
||||
const shaBase = JSON.stringify({ repository: configuration.repository })
|
||||
const sha256 = crypto.createHash('sha256').update(shaBase).digest('hex')
|
||||
|
||||
const baseServiceConfiguration = {
|
||||
replicas: 1,
|
||||
restart_policy: {
|
||||
condition: 'any',
|
||||
max_attempts: 3
|
||||
},
|
||||
update_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 1,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
}
|
||||
}
|
||||
|
||||
configuration.build.container.name = sha256.slice(0, 15)
|
||||
|
||||
configuration.general.nickname = nickname
|
||||
configuration.general.deployId = deployId
|
||||
configuration.general.workdir = `/tmp/${deployId}`
|
||||
|
||||
if (!configuration.publish.path) configuration.publish.path = '/'
|
||||
if (!configuration.publish.port) {
|
||||
if (configuration.build.pack === 'php') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'static') {
|
||||
configuration.publish.port = 80
|
||||
} else if (configuration.build.pack === 'nodejs') {
|
||||
configuration.publish.port = 3000
|
||||
}
|
||||
}
|
||||
if (!configuration.build.directory) {
|
||||
configuration.build.directory = '/'
|
||||
}
|
||||
if (configuration.build.pack === 'static' || configuration.build.pack === 'nodejs') {
|
||||
if (!configuration.build.command.installation) configuration.build.command.installation = 'yarn install'
|
||||
}
|
||||
|
||||
configuration.build.container.baseSHA = crypto.createHash('sha256').update(JSON.stringify(baseServiceConfiguration)).digest('hex')
|
||||
configuration.baseServiceConfiguration = baseServiceConfiguration
|
||||
|
||||
return configuration
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
}
|
||||
|
||||
async function updateServiceLabels (configuration, services) {
|
||||
// In case of any failure during deployment, still update the current configuration.
|
||||
const found = services.find(s => {
|
||||
const config = JSON.parse(s.Spec.Labels.configuration)
|
||||
if (config.repository.id === configuration.repository.id && config.repository.branch === configuration.repository.branch) {
|
||||
return config
|
||||
}
|
||||
return null
|
||||
})
|
||||
if (found) {
|
||||
const { ID } = found
|
||||
try {
|
||||
const Labels = { ...JSON.parse(found.Spec.Labels.configuration), ...configuration }
|
||||
execShellAsync(`docker service update --label-add configuration='${JSON.stringify(Labels)}' --label-add com.docker.stack.image='${configuration.build.container.name}:${configuration.build.container.tag}' ${ID}`)
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
module.exports = { setDefaultConfiguration, updateServiceLabels }
|
||||
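Since several routes below call `setDefaultConfiguration()` with a partially filled request body, a minimal usage sketch may help; the field names come from the function above, the concrete values are made up.

```js
// Sketch of calling setDefaultConfiguration() with a minimal payload; values are invented.
const { setDefaultConfiguration } = require('./configuration')

const configuration = setDefaultConfiguration({
  repository: { id: 123, organization: 'acme', name: 'website', branch: 'main' },
  general: {},
  build: { pack: 'static', container: {}, command: {} },
  publish: { domain: 'example.com' }
})

// The helper fills in the gaps: a cuid() deployId, a generated nickname, a
// /tmp/<deployId> workdir, publish.path '/', publish.port 80 for the static pack,
// build.directory '/' and a default 'yarn install' installation command.
console.log(configuration.general.deployId, configuration.publish.port)
```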
@@ -1,53 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
module.exports = async function (configuration) {
|
||||
try {
|
||||
// TODO: Do it better.
|
||||
await fs.writeFile(`${configuration.general.workdir}/.dockerignore`, 'node_modules')
|
||||
await fs.writeFile(
|
||||
`${configuration.general.workdir}/nginx.conf`,
|
||||
`user nginx;
|
||||
worker_processes auto;
|
||||
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
|
||||
access_log off;
|
||||
sendfile on;
|
||||
#tcp_nopush on;
|
||||
keepalive_timeout 65;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
|
||||
location / {
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
try_files $uri $uri/index.html $uri/ /index.html =404;
|
||||
}
|
||||
|
||||
error_page 404 /50x.html;
|
||||
|
||||
# redirect server error pages to the static page /50x.html
|
||||
#
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
`
|
||||
)
|
||||
} catch (error) {
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
}
|
||||
@@ -1,83 +0,0 @@
|
||||
const yaml = require('js-yaml')
|
||||
const fs = require('fs').promises
|
||||
const { execShellAsync } = require('../../common')
|
||||
const { docker } = require('../../docker')
|
||||
const { saveAppLog } = require('../../logging')
|
||||
const { deleteSameDeployments } = require('../cleanup')
|
||||
|
||||
module.exports = async function (configuration, configChanged, imageChanged) {
|
||||
try {
|
||||
const generateEnvs = {}
|
||||
for (const secret of configuration.publish.secrets) {
|
||||
generateEnvs[secret.name] = secret.value
|
||||
}
|
||||
const containerName = configuration.build.container.name
|
||||
|
||||
// Only save SHA256 of it in the configuration label
|
||||
const baseServiceConfiguration = configuration.baseServiceConfiguration
|
||||
delete configuration.baseServiceConfiguration
|
||||
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[containerName]: {
|
||||
image: `${configuration.build.container.name}:${configuration.build.container.tag}`,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
deploy: {
|
||||
...baseServiceConfiguration,
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=application',
|
||||
'configuration=' + JSON.stringify(configuration),
|
||||
'traefik.enable=true',
|
||||
'traefik.http.services.' +
|
||||
configuration.build.container.name +
|
||||
`.loadbalancer.server.port=${configuration.publish.port}`,
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.entrypoints=websecure',
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.rule=Host(`' +
|
||||
configuration.publish.domain +
|
||||
'`) && PathPrefix(`' +
|
||||
configuration.publish.path +
|
||||
'`)',
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.tls.certresolver=letsencrypt',
|
||||
'traefik.http.routers.' +
|
||||
configuration.build.container.name +
|
||||
'.middlewares=global-compress'
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
}
|
||||
await saveAppLog('### Publishing.', configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
// TODO: Compare stack.yml with the currently running one to upgrade if something changes, like restart_policy
|
||||
if (imageChanged) {
|
||||
// console.log('image changed')
|
||||
await execShellAsync(`docker service update --image ${configuration.build.container.name}:${configuration.build.container.tag} ${configuration.build.container.name}_${configuration.build.container.name}`)
|
||||
} else {
|
||||
// console.log('new deployment or force deployment or config changed')
|
||||
await deleteSameDeployments(configuration)
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy --prune -c - ${containerName}`
|
||||
)
|
||||
}
|
||||
|
||||
await saveAppLog('### Published done!', configuration)
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
await saveAppLog(`Error occured during deployment: ${error.message}`, configuration)
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
const jwt = require('jsonwebtoken')
|
||||
const axios = require('axios')
|
||||
const { execShellAsync, cleanupTmp } = require('../../common')
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
const { workdir } = configuration.general
|
||||
const { organization, name, branch } = configuration.repository
|
||||
const github = configuration.github
|
||||
|
||||
const githubPrivateKey = process.env.GITHUB_APP_PRIVATE_KEY.replace(/\\n/g, '\n').replace(/"/g, '')
|
||||
|
||||
const payload = {
|
||||
iat: Math.round(new Date().getTime() / 1000),
|
||||
exp: Math.round(new Date().getTime() / 1000 + 60),
|
||||
iss: parseInt(github.app.id)
|
||||
}
|
||||
|
||||
try {
|
||||
const jwtToken = jwt.sign(payload, githubPrivateKey, {
|
||||
algorithm: 'RS256'
|
||||
})
|
||||
const accessToken = await axios({
|
||||
method: 'POST',
|
||||
url: `https://api.github.com/app/installations/${github.installation.id}/access_tokens`,
|
||||
data: {},
|
||||
headers: {
|
||||
Authorization: 'Bearer ' + jwtToken,
|
||||
Accept: 'application/vnd.github.machine-man-preview+json'
|
||||
}
|
||||
})
|
||||
await execShellAsync(
|
||||
`mkdir -p ${workdir} && git clone -q -b ${branch} https://x-access-token:${accessToken.data.token}@github.com/${organization}/${name}.git ${workdir}/`
|
||||
)
|
||||
configuration.build.container.tag = (
|
||||
await execShellAsync(`cd ${configuration.general.workdir}/ && git rev-parse HEAD`)
|
||||
)
|
||||
.replace('\n', '')
|
||||
.slice(0, 7)
|
||||
} catch (error) {
|
||||
cleanupTmp(workdir)
|
||||
if (error.stack) console.log(error.stack)
|
||||
throw { error, type: 'server' }
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
const dayjs = require('dayjs')
|
||||
|
||||
const { saveServerLog } = require('../logging')
|
||||
const { cleanupTmp } = require('../common')
|
||||
|
||||
const { saveAppLog } = require('../logging')
|
||||
const copyFiles = require('./deploy/copyFiles')
|
||||
const buildContainer = require('./build/container')
|
||||
const deploy = require('./deploy/deploy')
|
||||
const Deployment = require('../../models/Deployment')
|
||||
const { cleanup, purgeOldThings } = require('./cleanup')
|
||||
const { updateServiceLabels } = require('./configuration')
|
||||
|
||||
async function queueAndBuild (configuration, services, configChanged, imageChanged) {
|
||||
const { id, organization, name, branch } = configuration.repository
|
||||
const { domain } = configuration.publish
|
||||
const { deployId, nickname, workdir } = configuration.general
|
||||
try {
|
||||
await new Deployment({
|
||||
repoId: id, branch, deployId, domain, organization, name, nickname
|
||||
}).save()
|
||||
await saveAppLog(`${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} Queued.`, configuration)
|
||||
await copyFiles(configuration)
|
||||
await buildContainer(configuration)
|
||||
await deploy(configuration, configChanged, imageChanged)
|
||||
await Deployment.findOneAndUpdate(
|
||||
{ repoId: id, branch, deployId, organization, name, domain },
|
||||
{ repoId: id, branch, deployId, organization, name, domain, progress: 'done' })
|
||||
await updateServiceLabels(configuration, services)
|
||||
cleanupTmp(workdir)
|
||||
await purgeOldThings()
|
||||
} catch (error) {
|
||||
await cleanup(configuration)
|
||||
cleanupTmp(workdir)
|
||||
const { type } = error.error
|
||||
if (type === 'app') {
|
||||
await saveAppLog(error.error, configuration, true)
|
||||
} else {
|
||||
await saveServerLog({ event: error.error, configuration })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { queueAndBuild }
|
||||
@@ -1,94 +0,0 @@
|
||||
const crypto = require('crypto')
|
||||
const shell = require('shelljs')
|
||||
const jsonwebtoken = require('jsonwebtoken')
|
||||
const { docker } = require('./docker')
|
||||
const User = require('../models/User')
|
||||
const algorithm = 'aes-256-cbc'
|
||||
const key = process.env.SECRETS_ENCRYPTION_KEY
|
||||
|
||||
function delay (t) {
|
||||
return new Promise(function (resolve) {
|
||||
setTimeout(function () {
|
||||
resolve('OK')
|
||||
}, t)
|
||||
})
|
||||
}
|
||||
|
||||
async function verifyUserId (authorization) {
|
||||
const token = authorization.split(' ')[1]
|
||||
const verify = jsonwebtoken.verify(token, process.env.JWT_SIGN_KEY)
|
||||
const found = await User.findOne({ uid: verify.jti })
|
||||
if (found) {
|
||||
return true
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
function execShellAsync (cmd, opts = {}) {
|
||||
try {
|
||||
return new Promise(function (resolve, reject) {
|
||||
shell.config.silent = true
|
||||
shell.exec(cmd, opts, function (code, stdout, stderr) {
|
||||
if (code !== 0) return reject(new Error(stderr))
|
||||
return resolve(stdout)
|
||||
})
|
||||
})
|
||||
} catch (error) {
|
||||
return new Error('Oops')
|
||||
}
|
||||
}
|
||||
function cleanupTmp (dir) {
|
||||
if (dir !== '/') shell.rm('-fr', dir)
|
||||
}
|
||||
|
||||
async function checkImageAvailable (name) {
|
||||
let cacheAvailable = false
|
||||
try {
|
||||
await docker.engine.getImage(name).get()
|
||||
cacheAvailable = true
|
||||
} catch (e) {
|
||||
// Cache image not found
|
||||
}
|
||||
return cacheAvailable
|
||||
}
|
||||
|
||||
function encryptData (text) {
|
||||
const iv = crypto.randomBytes(16)
|
||||
const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
|
||||
let encrypted = cipher.update(text)
|
||||
encrypted = Buffer.concat([encrypted, cipher.final()])
|
||||
return { iv: iv.toString('hex'), encryptedData: encrypted.toString('hex') }
|
||||
}
|
||||
|
||||
function decryptData (text) {
|
||||
const iv = Buffer.from(text.iv, 'hex')
|
||||
const encryptedText = Buffer.from(text.encryptedData, 'hex')
|
||||
const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
|
||||
let decrypted = decipher.update(encryptedText)
|
||||
decrypted = Buffer.concat([decrypted, decipher.final()])
|
||||
return decrypted.toString()
|
||||
}
|
||||
|
||||
function createToken (payload) {
|
||||
const { uuid } = payload
|
||||
return jsonwebtoken.sign({}, process.env.JWT_SIGN_KEY, {
|
||||
expiresIn: 15778800,
|
||||
algorithm: 'HS256',
|
||||
audience: 'coolify',
|
||||
issuer: 'coolify',
|
||||
jwtid: uuid,
|
||||
subject: `User:${uuid}`,
|
||||
notBefore: -1000
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
delay,
|
||||
createToken,
|
||||
execShellAsync,
|
||||
cleanupTmp,
|
||||
checkImageAvailable,
|
||||
encryptData,
|
||||
decryptData,
|
||||
verifyUserId
|
||||
}
|
||||
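A round-trip sketch for the encryption helpers above; the require path is an assumption about where `common.js` lives, and it assumes `SECRETS_ENCRYPTION_KEY` is a 32-byte value, which is what `aes-256-cbc` with `Buffer.from(key)` requires.

```js
// Hypothetical caller; the relative path to libs/common.js is an assumption.
const { encryptData, decryptData } = require('./libs/common')

const secret = encryptData('my-database-password')
console.log(secret)              // { iv: '<hex>', encryptedData: '<hex>' }
console.log(decryptData(secret)) // 'my-database-password'
```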
@@ -1,31 +0,0 @@
const Dockerode = require('dockerode')
const { saveAppLog } = require('./logging')

const docker = {
  engine: new Dockerode({
    socketPath: process.env.DOCKER_ENGINE
  }),
  network: process.env.DOCKER_NETWORK
}
async function streamEvents (stream, configuration) {
  try {
    await new Promise((resolve, reject) => {
      docker.engine.modem.followProgress(stream, onFinished, onProgress)
      function onFinished (err, res) {
        if (err) reject(err)
        resolve(res)
      }
      function onProgress (event) {
        if (event.error) {
          reject(event.error)
          return
        }
        saveAppLog(event.stream, configuration)
      }
    })
  } catch (error) {
    throw { error, type: 'app' }
  }
}

module.exports = { streamEvents, docker }
@@ -1,55 +0,0 @@
|
||||
const ApplicationLog = require('../models/Logs/Application')
|
||||
const ServerLog = require('../models/Logs/Server')
|
||||
const dayjs = require('dayjs')
|
||||
|
||||
function generateTimestamp () {
|
||||
return `${dayjs().format('YYYY-MM-DD HH:mm:ss.SSS')} `
|
||||
}
|
||||
|
||||
async function saveAppLog (event, configuration, isError) {
|
||||
try {
|
||||
const deployId = configuration.general.deployId
|
||||
const repoId = configuration.repository.id
|
||||
const branch = configuration.repository.branch
|
||||
if (isError) {
|
||||
// console.log(event, config, isError)
|
||||
let clearedEvent = null
|
||||
|
||||
if (event.error) clearedEvent = '[ERROR] ' + generateTimestamp() + event.error.replace(/(\r\n|\n|\r)/gm, '')
|
||||
else if (event) clearedEvent = '[ERROR] ' + generateTimestamp() + event.replace(/(\r\n|\n|\r)/gm, '')
|
||||
|
||||
try {
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
} else {
|
||||
if (event && event !== '\n') {
|
||||
const clearedEvent = '[INFO] ' + generateTimestamp() + event.replace(/(\r\n|\n|\r)/gm, '')
|
||||
try {
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: clearedEvent }).save()
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
return error
|
||||
}
|
||||
}
|
||||
|
||||
async function saveServerLog ({ event, configuration, type }) {
|
||||
if (configuration) {
|
||||
const deployId = configuration.general.deployId
|
||||
const repoId = configuration.repository.id
|
||||
const branch = configuration.repository.branch
|
||||
await new ApplicationLog({ repoId, branch, deployId, event: `[SERVER ERROR 😖]: ${event}` }).save()
|
||||
}
|
||||
await new ServerLog({ event, type }).save()
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
saveAppLog,
|
||||
saveServerLog
|
||||
}
|
||||
@@ -1,16 +0,0 @@
const mongoose = require('mongoose')
const deploymentSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    nickname: { type: String, required: true },
    repoId: { type: Number, required: true },
    organization: { type: String, required: true },
    name: { type: String, required: true },
    branch: { type: String, required: true },
    domain: { type: String, required: true },
    progress: { type: String, required: true, default: 'queued' }
  },
  { timestamps: true }
)

module.exports = mongoose.model('deployment', deploymentSchema)
@@ -1,10 +0,0 @@
const mongoose = require('mongoose')
const logSchema = mongoose.Schema(
  {
    deployId: { type: String, required: true },
    event: { type: String, required: true }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-application', logSchema)
@@ -1,13 +0,0 @@
const mongoose = require('mongoose')
const { version } = require('../../../package.json')
const logSchema = mongoose.Schema(
  {
    version: { type: String, required: true, default: version },
    type: { type: String, required: true, enum: ['API', 'UPGRADE-P-1', 'UPGRADE-P-2'], default: 'API' },
    event: { type: String, required: true },
    seen: { type: Boolean, required: true, default: false }
  },
  { timestamps: { createdAt: 'createdAt', updatedAt: false } }
)

module.exports = mongoose.model('logs-server', logSchema)
@@ -1,11 +0,0 @@
const mongoose = require('mongoose')

const settingsSchema = mongoose.Schema(
  {
    applicationName: { type: String, required: true, default: 'coolify' },
    allowRegistration: { type: Boolean, required: true, default: false }
  },
  { timestamps: true }
)

module.exports = mongoose.model('settings', settingsSchema)
@@ -1,12 +0,0 @@
const mongoose = require('mongoose')

const userSchema = mongoose.Schema(
  {
    email: { type: String, required: true },
    avatar: { type: String },
    uid: { type: String, required: true }
  },
  { timestamps: true }
)

module.exports = mongoose.model('user', userSchema)
@@ -1,15 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')

module.exports = async function (configuration) {
  const path = `${configuration.general.workdir}/${configuration.build.directory ? configuration.build.directory : ''}`
  try {
    // fs.stat() rejects if the Dockerfile does not exist.
    await fs.stat(`${path}/Dockerfile`)
  } catch (error) {
    throw { error: 'No custom dockerfile found.', type: 'app' }
  }
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: path },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,23 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../libs/docker')
const buildImageNodeDocker = (configuration) => {
  return [
    'FROM node:lts',
    'WORKDIR /usr/src/app',
    `COPY ${configuration.build.directory} ./`,
    configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
    `RUN ${configuration.build.command.build}`
  ].join('\n')
}
async function buildImage (configuration) {
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, buildImageNodeDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}

module.exports = {
  buildImage
}
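An illustrative call to `buildImage()`; the configuration values are invented, only the field names match what the helper reads.

```js
// Hypothetical caller of the build helper above; paths and values are assumptions.
// With these values the generated Dockerfile is:
//   FROM node:lts / WORKDIR /usr/src/app / COPY . ./ / RUN yarn install / RUN yarn build
const { buildImage } = require('./helpers')

buildImage({
  general: { workdir: '/tmp/example-deploy' },
  build: {
    directory: '.',
    command: { installation: 'yarn install', build: 'yarn build' },
    container: { name: 'example-app', tag: 'abc1234' }
  }
}).catch(console.error)
```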
@@ -1,6 +0,0 @@
const static = require('./static')
const nodejs = require('./nodejs')
const php = require('./php')
const custom = require('./custom')

module.exports = { static, nodejs, php, custom }
@@ -1,26 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { buildImage } = require('../helpers')
|
||||
const { streamEvents, docker } = require('../../libs/docker')
|
||||
|
||||
const publishNodejsDocker = (configuration) => {
|
||||
return [
|
||||
'FROM node:lts',
|
||||
'WORKDIR /usr/src/app',
|
||||
configuration.build.command.build
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
|
||||
: `COPY ${configuration.build.directory} ./`,
|
||||
configuration.build.command.installation && `RUN ${configuration.build.command.installation}`,
|
||||
`EXPOSE ${configuration.publish.port}`,
|
||||
'CMD [ "yarn", "start" ]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishNodejsDocker(configuration))
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
@@ -1,21 +0,0 @@
const fs = require('fs').promises
const { streamEvents, docker } = require('../../libs/docker')

const publishPHPDocker = (configuration) => {
  return [
    'FROM php:apache',
    'WORKDIR /usr/src/app',
    `COPY .${configuration.build.directory} /var/www/html`,
    'EXPOSE 80',
    'CMD ["apache2-foreground"]'
  ].join('\n')
}

module.exports = async function (configuration) {
  await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishPHPDocker(configuration))
  const stream = await docker.engine.buildImage(
    { src: ['.'], context: configuration.general.workdir },
    { t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
  )
  await streamEvents(stream, configuration)
}
@@ -1,27 +0,0 @@
|
||||
const fs = require('fs').promises
|
||||
const { buildImage } = require('../helpers')
|
||||
const { streamEvents, docker } = require('../../libs/docker')
|
||||
|
||||
const publishStaticDocker = (configuration) => {
|
||||
return [
|
||||
'FROM nginx:stable-alpine',
|
||||
'COPY nginx.conf /etc/nginx/nginx.conf',
|
||||
'WORKDIR /usr/share/nginx/html',
|
||||
configuration.build.command.build
|
||||
? `COPY --from=${configuration.build.container.name}:${configuration.build.container.tag} /usr/src/app/${configuration.publish.directory} ./`
|
||||
: `COPY ${configuration.build.directory} ./`,
|
||||
'EXPOSE 80',
|
||||
'CMD ["nginx", "-g", "daemon off;"]'
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
module.exports = async function (configuration) {
|
||||
if (configuration.build.command.build) await buildImage(configuration)
|
||||
await fs.writeFile(`${configuration.general.workdir}/Dockerfile`, publishStaticDocker(configuration))
|
||||
|
||||
const stream = await docker.engine.buildImage(
|
||||
{ src: ['.'], context: configuration.general.workdir },
|
||||
{ t: `${configuration.build.container.name}:${configuration.build.container.tag}` }
|
||||
)
|
||||
await streamEvents(stream, configuration)
|
||||
}
|
||||
@@ -1,21 +0,0 @@
const fp = require('fastify-plugin')
const User = require('../models/User')
module.exports = fp(async function (fastify, options, next) {
  fastify.register(require('fastify-jwt'), {
    secret: fastify.config.JWT_SIGN_KEY
  })
  fastify.addHook('onRequest', async (request, reply) => {
    try {
      const { jti } = await request.jwtVerify()
      const found = await User.findOne({ uid: jti })
      if (found) {
        return true
      } else {
        reply.code(401).send('Unauthorized')
      }
    } catch (err) {
      reply.code(401).send('Unauthorized')
    }
  })
  next()
})
@@ -1,35 +0,0 @@
|
||||
|
||||
const { verifyUserId } = require('../../../libs/common')
|
||||
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
if (!await verifyUserId(request.headers.authorization)) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
const configuration = setDefaultConfiguration(request.body)
|
||||
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
let foundDomain = false
|
||||
|
||||
for (const service of services) {
|
||||
const running = JSON.parse(service.Spec.Labels.configuration)
|
||||
if (running) {
|
||||
if (
|
||||
running.publish.domain === configuration.publish.domain &&
|
||||
running.repository.id !== configuration.repository.id
|
||||
) {
|
||||
foundDomain = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fastify.config.DOMAIN === configuration.publish.domain) foundDomain = true
|
||||
if (foundDomain) {
|
||||
reply.code(500).send({ message: 'Domain already in use.' })
|
||||
return
|
||||
}
|
||||
return { message: 'OK' }
|
||||
})
|
||||
}
|
||||
@@ -1,121 +0,0 @@
|
||||
|
||||
const { verifyUserId, cleanupTmp, execShellAsync } = require('../../../../libs/common')
|
||||
const Deployment = require('../../../../models/Deployment')
|
||||
const { queueAndBuild } = require('../../../../libs/applications')
|
||||
const { setDefaultConfiguration } = require('../../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../../libs/docker')
|
||||
const cloneRepository = require('../../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
// const postSchema = {
|
||||
// body: {
|
||||
// type: "object",
|
||||
// properties: {
|
||||
// ref: { type: "string" },
|
||||
// repository: {
|
||||
// type: "object",
|
||||
// properties: {
|
||||
// id: { type: "number" },
|
||||
// full_name: { type: "string" },
|
||||
// },
|
||||
// required: ["id", "full_name"],
|
||||
// },
|
||||
// installation: {
|
||||
// type: "object",
|
||||
// properties: {
|
||||
// id: { type: "number" },
|
||||
// },
|
||||
// required: ["id"],
|
||||
// },
|
||||
// },
|
||||
// required: ["ref", "repository", "installation"],
|
||||
// },
|
||||
// };
|
||||
fastify.post('/', async (request, reply) => {
|
||||
if (!await verifyUserId(request.headers.authorization)) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
|
||||
const configuration = setDefaultConfiguration(request.body)
|
||||
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
await cloneRepository(configuration)
|
||||
|
||||
let foundService = false
|
||||
let foundDomain = false
|
||||
let configChanged = false
|
||||
let imageChanged = false
|
||||
|
||||
let forceUpdate = false
|
||||
|
||||
for (const service of services) {
|
||||
const running = JSON.parse(service.Spec.Labels.configuration)
|
||||
if (running) {
|
||||
if (
|
||||
running.publish.domain === configuration.publish.domain &&
|
||||
running.repository.id !== configuration.repository.id
|
||||
) {
|
||||
foundDomain = true
|
||||
}
|
||||
if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
|
||||
// Base service configuration changed
|
||||
if (!running.build.container.baseSHA || running.build.container.baseSHA !== configuration.build.container.baseSHA) {
|
||||
configChanged = true
|
||||
}
|
||||
const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
|
||||
const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running')
|
||||
if (isError.length > 0) forceUpdate = true
|
||||
|
||||
foundService = true
|
||||
const runningWithoutContainer = JSON.parse(JSON.stringify(running))
|
||||
delete runningWithoutContainer.build.container
|
||||
|
||||
const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
|
||||
delete configurationWithoutContainer.build.container
|
||||
|
||||
// If only the configuration changed
|
||||
if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
|
||||
// If only the image changed
|
||||
if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
|
||||
// If build pack changed, forceUpdate the service
|
||||
if (running.build.pack !== configuration.build.pack) forceUpdate = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if (foundDomain) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Domain already in use.' })
|
||||
return
|
||||
}
|
||||
if (forceUpdate) {
|
||||
imageChanged = false
|
||||
configChanged = false
|
||||
} else {
|
||||
if (foundService && !imageChanged && !configChanged) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const alreadyQueued = await Deployment.find({
|
||||
repoId: configuration.repository.id,
|
||||
branch: configuration.repository.branch,
|
||||
organization: configuration.repository.organization,
|
||||
name: configuration.repository.name,
|
||||
domain: configuration.publish.domain,
|
||||
progress: { $in: ['queued', 'inprogress'] }
|
||||
})
|
||||
|
||||
if (alreadyQueued.length > 0) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
|
||||
queueAndBuild(configuration, services, configChanged, imageChanged)
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.', nickname: configuration.general.nickname, name: configuration.build.container.name })
|
||||
})
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
const ApplicationLog = require('../../../../models/Logs/Application')
|
||||
const Deployment = require('../../../../models/Deployment')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const relativeTime = require('dayjs/plugin/relativeTime')
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(relativeTime)
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
const getLogSchema = {
|
||||
querystring: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
repoId: { type: 'string' },
|
||||
branch: { type: 'string' }
|
||||
},
|
||||
required: ['repoId', 'branch']
|
||||
}
|
||||
}
|
||||
fastify.get('/', { schema: getLogSchema }, async (request, reply) => {
|
||||
const { repoId, branch, page } = request.query
|
||||
const onePage = 5
|
||||
const show = Number(page) * onePage || 5
|
||||
const deploy = await Deployment.find({ repoId, branch })
|
||||
.select('-_id -__v -repoId')
|
||||
.sort({ createdAt: 'desc' })
|
||||
.limit(show)
|
||||
|
||||
const finalLogs = deploy.map(d => {
|
||||
const finalLogs = { ...d._doc }
|
||||
|
||||
const updatedAt = dayjs(d.updatedAt).utc()
|
||||
|
||||
finalLogs.took = updatedAt.diff(dayjs(d.createdAt)) / 1000
|
||||
finalLogs.since = updatedAt.fromNow()
|
||||
|
||||
return finalLogs
|
||||
})
|
||||
return finalLogs
|
||||
})
|
||||
|
||||
fastify.get('/:deployId', async (request, reply) => {
|
||||
const { deployId } = request.params
|
||||
try {
|
||||
const logs = await ApplicationLog.find({ deployId })
|
||||
.select('-_id -__v')
|
||||
.sort({ createdAt: 'asc' })
|
||||
|
||||
const deploy = await Deployment.findOne({ deployId })
|
||||
.select('-_id -__v')
|
||||
.sort({ createdAt: 'desc' })
|
||||
|
||||
const finalLogs = {}
|
||||
finalLogs.progress = deploy.progress
|
||||
finalLogs.events = logs.map(log => log.event)
|
||||
finalLogs.human = dayjs(deploy.updatedAt).from(dayjs(deploy.updatedAt))
|
||||
return finalLogs
|
||||
} catch (e) {
|
||||
throw new Error('No logs found')
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,10 +0,0 @@
const { docker } = require('../../../libs/docker')

module.exports = async function (fastify) {
  fastify.get('/', async (request, reply) => {
    const { name } = request.query
    const service = await docker.engine.getService(`${name}_${name}`)
    const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true })).toString().split('\n').map(l => l.slice(8)).filter((a) => a)
    return { logs }
  })
}
@@ -1,35 +0,0 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const ApplicationLog = require('../../../models/Logs/Application')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.post('/', async (request, reply) => {
|
||||
const { organization, name, branch } = request.body
|
||||
let found = false
|
||||
try {
|
||||
(await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application').map(s => {
|
||||
const running = JSON.parse(s.Spec.Labels.configuration)
|
||||
if (running.repository.organization === organization &&
|
||||
running.repository.name === name &&
|
||||
running.repository.branch === branch) {
|
||||
found = running
|
||||
}
|
||||
return null
|
||||
})
|
||||
if (found) {
|
||||
const deploys = await Deployment.find({ organization, branch, name })
|
||||
for (const deploy of deploys) {
|
||||
await ApplicationLog.deleteMany({ deployId: deploy.deployId })
|
||||
await Deployment.deleteMany({ deployId: deploy.deployId })
|
||||
}
|
||||
await execShellAsync(`docker stack rm ${found.build.container.name}`)
|
||||
reply.code(200).send({ organization, name, branch })
|
||||
} else {
|
||||
reply.code(500).send({ message: 'Nothing to do.' })
|
||||
}
|
||||
} catch (error) {
|
||||
reply.code(500).send({ message: 'Nothing to do.' })
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,103 +0,0 @@
|
||||
const { docker } = require('../../libs/docker')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
// const getConfig = {
|
||||
// querystring: {
|
||||
// type: 'object',
|
||||
// properties: {
|
||||
// repoId: { type: 'number' },
|
||||
// branch: { type: 'string' }
|
||||
// },
|
||||
// required: ['repoId', 'branch']
|
||||
// }
|
||||
// }
|
||||
|
||||
// const saveConfig = {
|
||||
// body: {
|
||||
// type: 'object',
|
||||
// properties: {
|
||||
// build: {
|
||||
// type: 'object',
|
||||
// properties: {
|
||||
// baseDir: { type: 'string' },
|
||||
// installCmd: { type: 'string' },
|
||||
// buildCmd: { type: 'string' }
|
||||
// },
|
||||
// required: ['baseDir', 'installCmd', 'buildCmd']
|
||||
// },
|
||||
// publish: {
|
||||
// type: 'object',
|
||||
// properties: {
|
||||
// publishDir: { type: 'string' },
|
||||
// domain: { type: 'string' },
|
||||
// pathPrefix: { type: 'string' },
|
||||
// port: { type: 'number' }
|
||||
// },
|
||||
// required: ['publishDir', 'domain', 'pathPrefix', 'port']
|
||||
// },
|
||||
// previewDeploy: { type: 'boolean' },
|
||||
// branch: { type: 'string' },
|
||||
// repoId: { type: 'number' },
|
||||
// buildPack: { type: 'string' },
|
||||
// fullName: { type: 'string' },
|
||||
// installationId: { type: 'number' }
|
||||
// },
|
||||
// required: ['build', 'publish', 'previewDeploy', 'branch', 'repoId', 'buildPack', 'fullName', 'installationId']
|
||||
// }
|
||||
// }
|
||||
|
||||
// fastify.get("/all", async (request, reply) => {
|
||||
// return await Config.find().select("-_id -__v");
|
||||
// });
|
||||
|
||||
// fastify.get("/", { schema: getConfig }, async (request, reply) => {
|
||||
// const { repoId, branch } = request.query;
|
||||
// return await Config.findOne({ repoId, branch }).select("-_id -__v");
|
||||
// });
|
||||
|
||||
fastify.post('/', async (request, reply) => {
|
||||
const { name, organization, branch } = request.body
|
||||
const services = await docker.engine.listServices()
|
||||
const applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
const found = applications.find(r => {
|
||||
const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
|
||||
if (branch) {
|
||||
if (configuration.repository.name === name && configuration.repository.organization === organization && configuration.repository.branch === branch) {
|
||||
return r
|
||||
}
|
||||
} else {
|
||||
if (configuration.repository.name === name && configuration.repository.organization === organization) {
|
||||
return r
|
||||
}
|
||||
}
|
||||
return null
|
||||
})
|
||||
if (found) {
|
||||
return JSON.parse(found.Spec.Labels.configuration)
|
||||
} else {
|
||||
reply.code(500).send({ message: 'No configuration found.' })
|
||||
}
|
||||
})
|
||||
|
||||
// fastify.delete("/", async (request, reply) => {
|
||||
// const { repoId, branch } = request.body;
|
||||
|
||||
// const deploys = await Deployment.find({ repoId, branch })
|
||||
// const found = deploys.filter(d => d.progress !== 'done' && d.progress !== 'failed')
|
||||
// if (found.length > 0) {
|
||||
// throw new Error('Deployment inprogress, cannot delete now.');
|
||||
// }
|
||||
|
||||
// const config = await Config.findOneAndDelete({ repoId, branch })
|
||||
// for (const deploy of deploys) {
|
||||
// await ApplicationLog.findOneAndRemove({ deployId: deploy.deployId });
|
||||
// }
|
||||
// const secrets = await Secret.find({ repoId, branch });
|
||||
// for (const secret of secrets) {
|
||||
// await Secret.findByIdAndRemove(secret._id);
|
||||
// }
|
||||
// await execShellAsync(`docker stack rm ${config.containerName}`);
|
||||
// return { message: 'Deleted application and related configurations.' };
|
||||
// });
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const ServerLog = require('../../../models/Logs/Server')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
const latestDeployments = await Deployment.aggregate([
|
||||
{
|
||||
$sort: { createdAt: -1 }
|
||||
},
|
||||
{
|
||||
$group:
|
||||
{
|
||||
_id: {
|
||||
repoId: '$repoId',
|
||||
branch: '$branch'
|
||||
},
|
||||
createdAt: { $last: '$createdAt' },
|
||||
progress: { $first: '$progress' }
|
||||
}
|
||||
}
|
||||
])
|
||||
|
||||
const serverLogs = await ServerLog.find()
|
||||
const services = await docker.engine.listServices()
|
||||
|
||||
let applications = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application' && r.Spec.Labels.configuration)
|
||||
let databases = services.filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && r.Spec.Labels.configuration)
|
||||
applications = applications.map(r => {
|
||||
if (JSON.parse(r.Spec.Labels.configuration)) {
|
||||
const configuration = JSON.parse(r.Spec.Labels.configuration)
|
||||
const status = latestDeployments.find(l => configuration.repository.id === l._id.repoId && configuration.repository.branch === l._id.branch)
|
||||
if (status && status.progress) r.progress = status.progress
|
||||
r.Spec.Labels.configuration = configuration
|
||||
return r
|
||||
}
|
||||
return {}
|
||||
})
|
||||
databases = databases.map(r => {
|
||||
const configuration = r.Spec.Labels.configuration ? JSON.parse(r.Spec.Labels.configuration) : null
|
||||
r.Spec.Labels.configuration = configuration
|
||||
return r
|
||||
})
|
||||
applications = [...new Map(applications.map(item => [item.Spec.Labels.configuration.publish.domain, item])).values()]
|
||||
return {
|
||||
serverLogs,
|
||||
applications: {
|
||||
deployed: applications
|
||||
},
|
||||
databases: {
|
||||
deployed: databases
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT' && error.errno === -2) {
|
||||
throw new Error(`Docker service unavailable at ${error.address}.`)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,173 +0,0 @@
|
||||
const yaml = require('js-yaml')
|
||||
const fs = require('fs').promises
|
||||
const cuid = require('cuid')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
|
||||
const { uniqueNamesGenerator, adjectives, colors, animals } = require('unique-names-generator')
|
||||
const generator = require('generate-password')
|
||||
|
||||
function getUniq () {
|
||||
return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 })
|
||||
}
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/:deployId', async (request, reply) => {
|
||||
const { deployId } = request.params
|
||||
try {
|
||||
const database = (await docker.engine.listServices()).find(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'database' && JSON.parse(r.Spec.Labels.configuration).general.deployId === deployId)
|
||||
if (database) {
|
||||
const jsonEnvs = {}
|
||||
for (const d of database.Spec.TaskTemplate.ContainerSpec.Env) {
|
||||
const s = d.split('=')
|
||||
jsonEnvs[s[0]] = s[1]
|
||||
}
|
||||
const payload = {
|
||||
config: JSON.parse(database.Spec.Labels.configuration),
|
||||
envs: jsonEnvs
|
||||
}
|
||||
reply.code(200).send(payload)
|
||||
} else {
|
||||
throw new Error()
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error('No database found?')
|
||||
}
|
||||
})
|
||||
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: { type: 'string', enum: ['mongodb', 'postgresql', 'mysql', 'couchdb'] }
|
||||
},
|
||||
required: ['type']
|
||||
}
|
||||
}
|
||||
|
||||
fastify.post('/deploy', { schema: postSchema }, async (request, reply) => {
|
||||
let { type, defaultDatabaseName } = request.body
|
||||
const passwords = generator.generateMultiple(2, {
|
||||
length: 24,
|
||||
numbers: true,
|
||||
strict: true
|
||||
})
|
||||
const usernames = generator.generateMultiple(2, {
|
||||
length: 10,
|
||||
numbers: true,
|
||||
strict: true
|
||||
})
|
||||
// TODO: Query for existing db with the same name
|
||||
const nickname = getUniq()
|
||||
|
||||
if (!defaultDatabaseName) defaultDatabaseName = nickname
|
||||
|
||||
reply.code(201).send({ message: 'Deploying.' })
|
||||
// TODO: Persistent volume, custom inputs
|
||||
const deployId = cuid()
|
||||
const configuration = {
|
||||
general: {
|
||||
workdir: `/tmp/${deployId}`,
|
||||
deployId,
|
||||
nickname,
|
||||
type
|
||||
},
|
||||
database: {
|
||||
usernames,
|
||||
passwords,
|
||||
defaultDatabaseName
|
||||
},
|
||||
deploy: {
|
||||
name: nickname
|
||||
}
|
||||
}
|
||||
let generateEnvs = {}
|
||||
let image = null
|
||||
let volume = null
|
||||
if (type === 'mongodb') {
|
||||
generateEnvs = {
|
||||
MONGODB_ROOT_PASSWORD: passwords[0],
|
||||
MONGODB_USERNAME: usernames[0],
|
||||
MONGODB_PASSWORD: passwords[1],
|
||||
MONGODB_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/mongodb:4.4'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`
|
||||
} else if (type === 'postgresql') {
|
||||
generateEnvs = {
|
||||
POSTGRESQL_PASSWORD: passwords[0],
|
||||
POSTGRESQL_USERNAME: usernames[0],
|
||||
POSTGRESQL_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/postgresql:13.2.0'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`
|
||||
} else if (type === 'couchdb') {
|
||||
generateEnvs = {
|
||||
COUCHDB_PASSWORD: passwords[0],
|
||||
COUCHDB_USER: usernames[0]
|
||||
}
|
||||
image = 'bitnami/couchdb:3'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`
|
||||
} else if (type === 'mysql') {
|
||||
generateEnvs = {
|
||||
MYSQL_ROOT_PASSWORD: passwords[0],
|
||||
MYSQL_ROOT_USER: usernames[0],
|
||||
MYSQL_USER: usernames[1],
|
||||
MYSQL_PASSWORD: passwords[1],
|
||||
MYSQL_DATABASE: defaultDatabaseName
|
||||
}
|
||||
image = 'bitnami/mysql:8.0'
|
||||
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`
|
||||
}
|
||||
|
||||
const stack = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[configuration.general.deployId]: {
|
||||
image,
|
||||
networks: [`${docker.network}`],
|
||||
environment: generateEnvs,
|
||||
volumes: [volume],
|
||||
deploy: {
|
||||
replicas: 1,
|
||||
update_config: {
|
||||
parallelism: 0,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
rollback_config: {
|
||||
parallelism: 0,
|
||||
delay: '10s',
|
||||
order: 'start-first'
|
||||
},
|
||||
labels: [
|
||||
'managedBy=coolify',
|
||||
'type=database',
|
||||
'configuration=' + JSON.stringify(configuration)
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[`${docker.network}`]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: {
|
||||
[`${configuration.general.deployId}-${type}-data`]: {
|
||||
external: true
|
||||
}
|
||||
}
|
||||
}
|
||||
await execShellAsync(`mkdir -p ${configuration.general.workdir}`)
|
||||
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack))
|
||||
await execShellAsync(
|
||||
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
|
||||
)
|
||||
})
|
||||
|
||||
fastify.delete('/:dbName', async (request, reply) => {
|
||||
const { dbName } = request.params
|
||||
await execShellAsync(`docker stack rm ${dbName}`)
|
||||
reply.code(200).send({})
|
||||
})
|
||||
}
|
||||
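A hypothetical client call against the database deploy endpoint above; the base URL, the `/api/v1` prefix and the Bearer token are assumptions about how the API is mounted and protected (see api/app.js and the authentication plugin).

```js
// Sketch only: endpoint prefix, host and token name are assumptions.
const axios = require('axios')

axios.post(
  'http://localhost:3000/api/v1/databases/deploy',
  { type: 'postgresql' },
  { headers: { Authorization: `Bearer ${process.env.COOLIFY_JWT}` } }
).then(({ data }) => console.log(data)) // { message: 'Deploying.' }
  .catch(console.error)
```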
@@ -1,121 +0,0 @@
|
||||
const axios = require('axios')
|
||||
const User = require('../../../models/User')
|
||||
const Settings = require('../../../models/Settings')
|
||||
const cuid = require('cuid')
|
||||
const mongoose = require('mongoose')
|
||||
const jwt = require('jsonwebtoken')
|
||||
module.exports = async function (fastify) {
|
||||
const githubCodeSchema = {
|
||||
schema: {
|
||||
querystring: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
code: { type: 'string' }
|
||||
},
|
||||
required: ['code']
|
||||
}
|
||||
}
|
||||
}
|
||||
fastify.get('/app', { schema: githubCodeSchema }, async (request, reply) => {
|
||||
const { code } = request.query
|
||||
try {
|
||||
const { data } = await axios({
|
||||
method: 'post',
|
||||
url: `https://github.com/login/oauth/access_token?client_id=${fastify.config.VITE_GITHUB_APP_CLIENTID}&client_secret=${fastify.config.GITHUB_APP_CLIENT_SECRET}&code=${code}`,
|
||||
headers: {
|
||||
accept: 'application/json'
|
||||
}
|
||||
})
|
||||
|
||||
const token = data.access_token
|
||||
const githubAxios = axios.create({
|
||||
baseURL: 'https://api.github.com'
|
||||
})
|
||||
|
||||
githubAxios.defaults.headers.common.Accept = 'Application/json'
|
||||
githubAxios.defaults.headers.common.Authorization = `token ${token}`
|
||||
|
||||
try {
|
||||
let uid = cuid()
|
||||
const { avatar_url } = (await githubAxios.get('/user')).data // eslint-disable-line
|
||||
const email = (await githubAxios.get('/user/emails')).data.filter(
|
||||
(e) => e.primary
|
||||
)[0].email
|
||||
const settings = await Settings.findOne({ applicationName: 'coolify' })
|
||||
const registeredUsers = await User.find().countDocuments()
|
||||
const foundUser = await User.findOne({ email })
|
||||
if (foundUser) {
|
||||
await User.findOneAndUpdate(
|
||||
{ email },
|
||||
{ avatar: avatar_url },
|
||||
{ upsert: true, new: true }
|
||||
)
|
||||
uid = foundUser.uid
|
||||
} else {
|
||||
if (registeredUsers === 0) {
|
||||
const newUser = new User({
|
||||
_id: new mongoose.Types.ObjectId(),
|
||||
email,
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
} else {
|
||||
if (!settings && registeredUsers > 0) {
|
||||
reply.code(500).send('Registration disabled, enable it in settings.')
|
||||
} else {
|
||||
if (!settings.allowRegistration) {
|
||||
reply.code(500).send('You are not allowed here!')
|
||||
} else {
|
||||
const newUser = new User({
|
||||
_id: new mongoose.Types.ObjectId(),
|
||||
email,
|
||||
avatar: avatar_url,
|
||||
uid
|
||||
})
|
||||
try {
|
||||
await newUser.save()
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
const jwtToken = jwt.sign({}, fastify.config.JWT_SIGN_KEY, {
|
||||
expiresIn: 15778800,
|
||||
algorithm: 'HS256',
|
||||
audience: 'coolLabs',
|
||||
issuer: 'coolLabs',
|
||||
jwtid: uid,
|
||||
subject: `User:${uid}`,
|
||||
notBefore: -1000
|
||||
})
|
||||
reply
|
||||
.code(200)
|
||||
.redirect(
|
||||
302,
|
||||
`/api/v1/login/github/success?jwtToken=${jwtToken}&ghToken=${token}`
|
||||
)
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
reply.code(500).send({ success: false, error: e })
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
reply.code(500).send({ success: false, error: error.message })
|
||||
}
|
||||
})
|
||||
fastify.get('/success', async (request, reply) => {
|
||||
return reply.sendFile('bye.html')
|
||||
})
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
const Settings = require('../../../models/Settings')
|
||||
module.exports = async function (fastify) {
|
||||
const applicationName = 'coolify'
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
allowRegistration: { type: 'boolean' }
|
||||
},
|
||||
required: ['allowRegistration']
|
||||
}
|
||||
}
|
||||
|
||||
fastify.get('/', async (request, reply) => {
|
||||
try {
|
||||
let settings = await Settings.findOne({ applicationName }).select('-_id -__v')
|
||||
// TODO: Should do better
|
||||
if (!settings) {
|
||||
settings = {
|
||||
applicationName,
|
||||
allowRegistration: false
|
||||
}
|
||||
}
|
||||
return {
|
||||
settings
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
try {
|
||||
const settings = await Settings.findOneAndUpdate(
|
||||
{ applicationName },
|
||||
{ applicationName, ...request.body },
|
||||
{ upsert: true, new: true }
|
||||
).select('-_id -__v')
|
||||
reply.code(201).send({ settings })
|
||||
} catch (error) {
|
||||
throw new Error(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
reply.code(200).send('NO')
|
||||
})
|
||||
}
|
||||
@@ -1,12 +0,0 @@
|
||||
const { execShellAsync } = require('../../../libs/common')
|
||||
const { saveServerLog } = require('../../../libs/logging')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
const upgradeP1 = await execShellAsync('bash ./install.sh upgrade-phase-1')
|
||||
await saveServerLog({ event: upgradeP1, type: 'UPGRADE-P-1' })
|
||||
reply.code(200).send('I\'m trying, okay?')
|
||||
const upgradeP2 = await execShellAsync('bash ./install.sh upgrade-phase-2')
|
||||
await saveServerLog({ event: upgradeP2, type: 'UPGRADE-P-2' })
|
||||
})
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
const User = require('../../models/User')
|
||||
const jwt = require('jsonwebtoken')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
fastify.get('/', async (request, reply) => {
|
||||
const { authorization } = request.headers
|
||||
if (!authorization) {
|
||||
reply.code(401).send({})
|
||||
return
|
||||
}
|
||||
const token = authorization.split(' ')[1]
|
||||
const verify = jwt.verify(token, fastify.config.JWT_SIGN_KEY)
|
||||
const found = await User.findOne({ uid: verify.jti })
|
||||
found ? reply.code(200).send({}) : reply.code(401).send({})
|
||||
})
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
const crypto = require('crypto')
|
||||
const { cleanupTmp, execShellAsync } = require('../../../libs/common')
|
||||
const Deployment = require('../../../models/Deployment')
|
||||
const { queueAndBuild } = require('../../../libs/applications')
|
||||
const { setDefaultConfiguration } = require('../../../libs/applications/configuration')
|
||||
const { docker } = require('../../../libs/docker')
|
||||
const cloneRepository = require('../../../libs/applications/github/cloneRepository')
|
||||
|
||||
module.exports = async function (fastify) {
|
||||
// TODO: Add this to fastify plugin
|
||||
const postSchema = {
|
||||
body: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
ref: { type: 'string' },
|
||||
repository: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'number' },
|
||||
full_name: { type: 'string' }
|
||||
},
|
||||
required: ['id', 'full_name']
|
||||
},
|
||||
installation: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'number' }
|
||||
},
|
||||
required: ['id']
|
||||
}
|
||||
},
|
||||
required: ['ref', 'repository', 'installation']
|
||||
}
|
||||
}
|
||||
fastify.post('/', { schema: postSchema }, async (request, reply) => {
|
||||
const hmac = crypto.createHmac('sha256', fastify.config.GITHUP_APP_WEBHOOK_SECRET)
|
||||
const digest = Buffer.from('sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'), 'utf8')
|
||||
const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8')
|
||||
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
||||
reply.code(500).send({ error: 'Invalid request' })
|
||||
return
|
||||
}
|
||||
|
||||
if (request.headers['x-github-event'] !== 'push') {
|
||||
reply.code(500).send({ error: 'Not a push event.' })
|
||||
return
|
||||
}
|
||||
|
||||
const services = (await docker.engine.listServices()).filter(r => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
|
||||
|
||||
let configuration = services.find(r => {
|
||||
if (request.body.ref.startsWith('refs')) {
|
||||
const branch = request.body.ref.split('/')[2]
|
||||
if (
|
||||
JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
|
||||
JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
|
||||
) {
|
||||
return r
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
})
|
||||
|
||||
if (!configuration) {
|
||||
reply.code(500).send({ error: 'No configuration found.' })
|
||||
return
|
||||
}
|
||||
|
||||
configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration))
|
||||
|
||||
await cloneRepository(configuration)
|
||||
|
||||
let foundService = false
|
||||
let foundDomain = false
|
||||
let configChanged = false
|
||||
let imageChanged = false
|
||||
|
||||
let forceUpdate = false
|
||||
|
||||
for (const service of services) {
|
||||
const running = JSON.parse(service.Spec.Labels.configuration)
|
||||
if (running) {
|
||||
if (
|
||||
running.publish.domain === configuration.publish.domain &&
|
||||
running.repository.id !== configuration.repository.id &&
|
||||
running.repository.branch !== configuration.repository.branch
|
||||
) {
|
||||
foundDomain = true
|
||||
}
|
||||
if (running.repository.id === configuration.repository.id && running.repository.branch === configuration.repository.branch) {
|
||||
const state = await execShellAsync(`docker stack ps ${running.build.container.name} --format '{{ json . }}'`)
|
||||
const isError = state.split('\n').filter(n => n).map(s => JSON.parse(s)).filter(n => n.DesiredState !== 'Running')
|
||||
if (isError.length > 0) forceUpdate = true
|
||||
foundService = true
|
||||
|
||||
const runningWithoutContainer = JSON.parse(JSON.stringify(running))
|
||||
delete runningWithoutContainer.build.container
|
||||
|
||||
const configurationWithoutContainer = JSON.parse(JSON.stringify(configuration))
|
||||
delete configurationWithoutContainer.build.container
|
||||
|
||||
if (JSON.stringify(runningWithoutContainer.build) !== JSON.stringify(configurationWithoutContainer.build) || JSON.stringify(runningWithoutContainer.publish) !== JSON.stringify(configurationWithoutContainer.publish)) configChanged = true
|
||||
if (running.build.container.tag !== configuration.build.container.tag) imageChanged = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if (foundDomain) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Domain already used.' })
|
||||
return
|
||||
}
|
||||
if (forceUpdate) {
|
||||
imageChanged = false
|
||||
configChanged = false
|
||||
} else {
|
||||
if (foundService && !imageChanged && !configChanged) {
|
||||
cleanupTmp(configuration.general.workdir)
|
||||
reply.code(500).send({ message: 'Nothing changed, no need to redeploy.' })
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const alreadyQueued = await Deployment.find({
|
||||
repoId: configuration.repository.id,
|
||||
branch: configuration.repository.branch,
|
||||
organization: configuration.repository.organization,
|
||||
name: configuration.repository.name,
|
||||
domain: configuration.publish.domain,
|
||||
progress: { $in: ['queued', 'inprogress'] }
|
||||
})
|
||||
|
||||
if (alreadyQueued.length > 0) {
|
||||
reply.code(200).send({ message: 'Already in the queue.' })
|
||||
return
|
||||
}
|
||||
|
||||
queueAndBuild(configuration, services, configChanged, imageChanged)
|
||||
|
||||
reply.code(201).send({ message: 'Deployment queued.' })
|
||||
})
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
const schema = {
|
||||
type: 'object',
|
||||
required: [
|
||||
'DOMAIN',
|
||||
'EMAIL',
|
||||
'VITE_GITHUB_APP_CLIENTID',
|
||||
'GITHUB_APP_CLIENT_SECRET',
|
||||
'GITHUB_APP_PRIVATE_KEY',
|
||||
'GITHUP_APP_WEBHOOK_SECRET',
|
||||
'JWT_SIGN_KEY',
|
||||
'SECRETS_ENCRYPTION_KEY'
|
||||
],
|
||||
properties: {
|
||||
DOMAIN: {
|
||||
type: 'string'
|
||||
},
|
||||
EMAIL: {
|
||||
type: 'string'
|
||||
},
|
||||
VITE_GITHUB_APP_CLIENTID: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUB_APP_CLIENT_SECRET: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUB_APP_PRIVATE_KEY: {
|
||||
type: 'string'
|
||||
},
|
||||
GITHUP_APP_WEBHOOK_SECRET: {
|
||||
type: 'string'
|
||||
},
|
||||
JWT_SIGN_KEY: {
|
||||
type: 'string'
|
||||
},
|
||||
DOCKER_ENGINE: {
|
||||
type: 'string',
|
||||
default: '/var/run/docker.sock'
|
||||
},
|
||||
DOCKER_NETWORK: {
|
||||
type: 'string',
|
||||
default: 'coollabs'
|
||||
},
|
||||
SECRETS_ENCRYPTION_KEY: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { schema }
|
||||
@@ -1,90 +0,0 @@
|
||||
require('dotenv').config()
|
||||
const fs = require('fs')
|
||||
const util = require('util')
|
||||
const { saveServerLog } = require('./libs/logging')
|
||||
const Deployment = require('./models/Deployment')
|
||||
const fastify = require('fastify')({
|
||||
logger: { level: 'error' }
|
||||
})
|
||||
const mongoose = require('mongoose')
|
||||
const path = require('path')
|
||||
const { schema } = require('./schema')
|
||||
|
||||
fastify.register(require('fastify-env'), {
|
||||
schema,
|
||||
dotenv: true
|
||||
})
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
fastify.register(require('fastify-static'), {
|
||||
root: path.join(__dirname, '../dist/')
|
||||
})
|
||||
|
||||
fastify.setNotFoundHandler(function (request, reply) {
|
||||
reply.sendFile('index.html')
|
||||
})
|
||||
} else {
|
||||
fastify.register(require('fastify-static'), {
|
||||
root: path.join(__dirname, '../public/')
|
||||
})
|
||||
}
|
||||
|
||||
fastify.register(require('./app'), { prefix: '/api/v1' })
|
||||
fastify.setErrorHandler(async (error, request, reply) => {
|
||||
console.log({ error })
|
||||
if (error.statusCode) {
|
||||
reply.status(error.statusCode).send({ message: error.message } || { message: 'Something is NOT okay. Are you okay?' })
|
||||
} else {
|
||||
reply.status(500).send({ message: error.message } || { message: 'Something is NOT okay. Are you okay?' })
|
||||
}
|
||||
await saveServerLog({ event: error })
|
||||
})
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
mongoose.connect(
|
||||
`mongodb://${process.env.MONGODB_USER}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DB}?authSource=${process.env.MONGODB_DB}&readPreference=primary&ssl=false`,
|
||||
{ useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
|
||||
)
|
||||
} else {
|
||||
mongoose.connect(
|
||||
'mongodb://localhost:27017/coolify?&readPreference=primary&ssl=false',
|
||||
{ useNewUrlParser: true, useUnifiedTopology: true, useFindAndModify: false }
|
||||
)
|
||||
}
|
||||
|
||||
mongoose.connection.on(
|
||||
'error',
|
||||
console.error.bind(console, 'connection error:')
|
||||
)
|
||||
mongoose.connection.once('open', async function () {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
fastify.listen(3000, '0.0.0.0')
|
||||
console.log('Coolify API is up and running in production.')
|
||||
} else {
|
||||
const logFile = fs.createWriteStream('api/development/console.log', { flags: 'w' })
|
||||
const logStdout = process.stdout
|
||||
|
||||
console.log = function (d) {
|
||||
logFile.write(`[INFO]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
console.error = function (d) {
|
||||
logFile.write(`[ERROR]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
console.warn = function (d) {
|
||||
logFile.write(`[WARN]: ${util.format(d)}\n`)
|
||||
logStdout.write(util.format(d) + '\n')
|
||||
}
|
||||
|
||||
fastify.listen(3001)
|
||||
console.log('Coolify API is up and running in development.')
|
||||
}
|
||||
// On start cleanup inprogress/queued deployments.
|
||||
const deployments = await Deployment.find({ progress: { $in: ['queued', 'inprogress'] } })
|
||||
for (const deployment of deployments) {
|
||||
await Deployment.findByIdAndUpdate(deployment._id, { $set: { progress: 'failed' } })
|
||||
}
|
||||
})
|
||||
9
apps/api/.eslintignore
Normal file
9
apps/api/.eslintignore
Normal file
@@ -0,0 +1,9 @@
|
||||
seed.js
|
||||
.DS_Store
|
||||
node_modules
|
||||
build
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
dist
|
||||
dev.db
|
||||
11
apps/api/.eslintrc
Normal file
11
apps/api/.eslintrc
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"root": true,
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"plugins": ["@typescript-eslint", "prettier"],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"prettier"
|
||||
]
|
||||
}
|
||||
11
apps/api/.gitignore
vendored
Normal file
11
apps/api/.gitignore
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
.DS_Store
|
||||
node_modules
|
||||
build
|
||||
.svelte-kit
|
||||
package
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
dist
|
||||
dev.db
|
||||
client
|
||||
0
apps/api/.prettierignore
Normal file
0
apps/api/.prettierignore
Normal file
6
apps/api/.prettierrc
Normal file
6
apps/api/.prettierrc
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"useTabs": true,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"printWidth": 100
|
||||
}
|
||||
0
apps/api/db/.gitkeep
Normal file
0
apps/api/db/.gitkeep
Normal file
7
apps/api/nodemon.json
Normal file
7
apps/api/nodemon.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"watch": ["src"],
|
||||
"ignore": ["src/**/*.test.ts"],
|
||||
"ext": "ts,mjs,json,graphql",
|
||||
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build",
|
||||
"legacyWatch": true
|
||||
}
|
||||
68
apps/api/package.json
Normal file
68
apps/api/package.json
Normal file
@@ -0,0 +1,68 @@
|
||||
{
|
||||
"name": "coolify-api",
|
||||
"description": "Coolify's Fastify API",
|
||||
"license": "AGPL-3.0",
|
||||
"scripts": {
|
||||
"db:push": "prisma db push && prisma generate",
|
||||
"db:seed": "prisma db seed",
|
||||
"db:studio": "prisma studio",
|
||||
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
|
||||
"dev": "nodemon",
|
||||
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
|
||||
"format": "prettier --write 'src/**/*.{js,ts,json,md}'",
|
||||
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
|
||||
"start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"@breejs/ts-worker": "2.0.0",
|
||||
"@fastify/autoload": "5.1.0",
|
||||
"@fastify/cookie": "7.1.0",
|
||||
"@fastify/cors": "8.0.0",
|
||||
"@fastify/env": "4.0.0",
|
||||
"@fastify/jwt": "6.3.1",
|
||||
"@fastify/static": "6.4.0",
|
||||
"@iarna/toml": "2.2.5",
|
||||
"@prisma/client": "3.15.2",
|
||||
"axios": "0.27.2",
|
||||
"bcryptjs": "2.4.3",
|
||||
"bree": "9.1.1",
|
||||
"cabin": "9.1.2",
|
||||
"compare-versions": "4.1.3",
|
||||
"cuid": "2.1.8",
|
||||
"dayjs": "1.11.3",
|
||||
"dockerode": "3.3.2",
|
||||
"dotenv-extended": "2.9.0",
|
||||
"fastify": "4.2.1",
|
||||
"fastify-plugin": "4.0.0",
|
||||
"generate-password": "1.7.0",
|
||||
"get-port": "6.1.2",
|
||||
"got": "12.1.0",
|
||||
"is-ip": "4.0.0",
|
||||
"js-yaml": "4.1.0",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"node-forge": "1.3.1",
|
||||
"node-os-utils": "1.3.7",
|
||||
"p-queue": "7.2.0",
|
||||
"strip-ansi": "7.0.1",
|
||||
"unique-names-generator": "4.7.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "18.0.4",
|
||||
"@types/node-os-utils": "1.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.30.6",
|
||||
"@typescript-eslint/parser": "5.30.6",
|
||||
"esbuild": "0.14.49",
|
||||
"eslint": "8.19.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"eslint-plugin-prettier": "4.2.1",
|
||||
"nodemon": "2.0.19",
|
||||
"prettier": "2.7.1",
|
||||
"prisma": "3.15.2",
|
||||
"rimraf": "3.0.2",
|
||||
"tsconfig-paths": "4.0.0",
|
||||
"typescript": "4.7.4"
|
||||
},
|
||||
"prisma": {
|
||||
"seed": "node prisma/seed.js"
|
||||
}
|
||||
}
|
||||
443
apps/api/prisma/migrations/20220131142425_init/migration.sql
Normal file
443
apps/api/prisma/migrations/20220131142425_init/migration.sql
Normal file
@@ -0,0 +1,443 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"password" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Permission" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"userId" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Permission_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
|
||||
CONSTRAINT "Permission_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT,
|
||||
FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "TeamInvitation" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"uid" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"teamId" TEXT NOT NULL,
|
||||
"teamName" TEXT NOT NULL,
|
||||
"permission" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Application" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"repository" TEXT,
|
||||
"configHash" TEXT,
|
||||
"branch" TEXT,
|
||||
"buildPack" TEXT,
|
||||
"projectId" INTEGER,
|
||||
"port" INTEGER,
|
||||
"installCommand" TEXT,
|
||||
"buildCommand" TEXT,
|
||||
"startCommand" TEXT,
|
||||
"baseDirectory" TEXT,
|
||||
"publishDirectory" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "BuildLog" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT,
|
||||
"buildId" TEXT NOT NULL,
|
||||
"line" TEXT NOT NULL,
|
||||
"time" INTEGER NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Build" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"type" TEXT NOT NULL,
|
||||
"applicationId" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"gitSourceId" TEXT,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"commit" TEXT,
|
||||
"status" TEXT DEFAULT 'queued',
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DestinationDocker" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"network" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"engine" TEXT NOT NULL,
|
||||
"remoteEngine" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCoolifyProxyUsed" BOOLEAN DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GithubApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"appId" INTEGER,
|
||||
"installationId" INTEGER,
|
||||
"clientId" TEXT,
|
||||
"clientSecret" TEXT,
|
||||
"webhookSecret" TEXT,
|
||||
"privateKey" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "GitlabApp" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"oauthId" INTEGER NOT NULL,
|
||||
"groupName" TEXT,
|
||||
"deployKeyId" INTEGER,
|
||||
"privateSshKey" TEXT,
|
||||
"publicSshKey" TEXT,
|
||||
"webhookToken" TEXT,
|
||||
"appId" TEXT,
|
||||
"appSecret" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Database" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"defaultDatabase" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"dbUser" TEXT,
|
||||
"dbUserPassword" TEXT,
|
||||
"rootUser" TEXT,
|
||||
"rootUserPassword" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Database_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Minio" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"rootUser" TEXT NOT NULL,
|
||||
"rootUserPassword" TEXT NOT NULL,
|
||||
"publicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Minio_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Vscodeserver" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"password" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Vscodeserver_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_TeamToUser" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ApplicationToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Application" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitSourceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitSource" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GithubAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GithubApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_GitlabAppToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "GitlabApp" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DestinationDockerToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "DestinationDocker" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_DatabaseToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Database" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "_ServiceToTeam" (
|
||||
"A" TEXT NOT NULL,
|
||||
"B" TEXT NOT NULL,
|
||||
FOREIGN KEY ("A") REFERENCES "Service" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
FOREIGN KEY ("B") REFERENCES "Team" ("id") ON DELETE CASCADE ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_id_key" ON "User"("id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Application_fqdn_key" ON "Application"("fqdn");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_key" ON "Secret"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DestinationDocker_network_key" ON "DestinationDocker"("network");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GithubApp_name_key" ON "GithubApp"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_oauthId_key" ON "GitlabApp"("oauthId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "GitlabApp_groupName_key" ON "GitlabApp"("groupName");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Minio_serviceId_key" ON "Minio"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Vscodeserver_serviceId_key" ON "Vscodeserver"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_TeamToUser_AB_unique" ON "_TeamToUser"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_TeamToUser_B_index" ON "_TeamToUser"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ApplicationToTeam_AB_unique" ON "_ApplicationToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ApplicationToTeam_B_index" ON "_ApplicationToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitSourceToTeam_AB_unique" ON "_GitSourceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitSourceToTeam_B_index" ON "_GitSourceToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GithubAppToTeam_AB_unique" ON "_GithubAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GithubAppToTeam_B_index" ON "_GithubAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_GitlabAppToTeam_AB_unique" ON "_GitlabAppToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_GitlabAppToTeam_B_index" ON "_GitlabAppToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DestinationDockerToTeam_AB_unique" ON "_DestinationDockerToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DestinationDockerToTeam_B_index" ON "_DestinationDockerToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_DatabaseToTeam_AB_unique" ON "_DatabaseToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_DatabaseToTeam_B_index" ON "_DatabaseToTeam"("B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "_ServiceToTeam_AB_unique" ON "_ServiceToTeam"("A", "B");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "_ServiceToTeam_B_index" ON "_ServiceToTeam"("B");
|
||||
@@ -0,0 +1,28 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Team" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"serviceId" TEXT
|
||||
);
|
||||
INSERT INTO "new_Team" ("createdAt", "databaseId", "id", "name", "serviceId", "updatedAt") SELECT "createdAt", "databaseId", "id", "name", "serviceId", "updatedAt" FROM "Team";
|
||||
DROP TABLE "Team";
|
||||
ALTER TABLE "new_Team" RENAME TO "Team";
|
||||
CREATE TABLE "new_DatabaseSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
"isPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"appendOnly" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "DatabaseSettings_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_DatabaseSettings" ("createdAt", "databaseId", "id", "isPublic", "updatedAt") SELECT "createdAt", "databaseId", "id", "isPublic", "updatedAt" FROM "DatabaseSettings";
|
||||
DROP TABLE "DatabaseSettings";
|
||||
ALTER TABLE "new_DatabaseSettings" RENAME TO "DatabaseSettings";
|
||||
CREATE UNIQUE INDEX "DatabaseSettings_databaseId_key" ON "DatabaseSettings"("databaseId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,11 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- A unique constraint covering the columns `[name,applicationId]` on the table `Secret` will be added. If there are existing duplicate values, this will fail.
|
||||
|
||||
*/
|
||||
-- DropIndex
|
||||
DROP INDEX "Secret_name_key";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Secret_name_applicationId_key" ON "Secret"("name", "applicationId");
|
||||
@@ -0,0 +1,47 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "id", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
CREATE TABLE "new_Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
|
||||
DROP TABLE "Service";
|
||||
ALTER TABLE "new_Service" RENAME TO "Service";
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,19 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Secret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"isPRMRSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isBuildSecret" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
CONSTRAINT "Secret_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Secret" ("applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value") SELECT "applicationId", "createdAt", "id", "isBuildSecret", "name", "updatedAt", "value" FROM "Secret";
|
||||
DROP TABLE "Secret";
|
||||
ALTER TABLE "new_Secret" RENAME TO "Secret";
|
||||
CREATE UNIQUE INDEX "Secret_name_applicationId_isPRMRSecret_key" ON "Secret"("name", "applicationId", "isPRMRSecret");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,20 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Setting" ADD COLUMN "proxyHash" TEXT;
|
||||
@@ -0,0 +1,13 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ServiceSecret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
CONSTRAINT "ServiceSecret_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ServiceSecret_name_serviceId_key" ON "ServiceSecret"("name", "serviceId");
|
||||
@@ -0,0 +1,19 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt") SELECT "applicationId", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "phpModules" TEXT;
|
||||
@@ -0,0 +1,18 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationPersistentStorage" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"path" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_key" ON "ApplicationPersistentStorage"("applicationId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationPersistentStorage_path_key" ON "ApplicationPersistentStorage"("path");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
|
||||
@@ -0,0 +1,19 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Ghost" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"defaultEmail" TEXT NOT NULL,
|
||||
"defaultPassword" TEXT NOT NULL,
|
||||
"mariadbUser" TEXT NOT NULL,
|
||||
"mariadbPassword" TEXT NOT NULL,
|
||||
"mariadbRootUser" TEXT NOT NULL,
|
||||
"mariadbRootUserPassword" TEXT NOT NULL,
|
||||
"mariadbDatabase" TEXT,
|
||||
"mariadbPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Ghost_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Ghost_serviceId_key" ON "Ghost"("serviceId");
|
||||
@@ -0,0 +1,4 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "pythonModule" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "pythonVariable" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "pythonWSGI" TEXT;
|
||||
@@ -0,0 +1,12 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "MeiliSearch" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"masterKey" TEXT NOT NULL,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "MeiliSearch_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "MeiliSearch_serviceId_key" ON "MeiliSearch"("serviceId");
|
||||
@@ -0,0 +1,29 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"ftpUser" TEXT,
|
||||
"ftpPassword" TEXT,
|
||||
"ftpPublicPort" INTEGER,
|
||||
"ftpHostKey" TEXT,
|
||||
"ftpHostKeyPrivate" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
|
||||
DROP TABLE "Wordpress";
|
||||
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,5 @@
|
||||
-- DropIndex
|
||||
DROP INDEX "ApplicationPersistentStorage_path_key";
|
||||
|
||||
-- DropIndex
|
||||
DROP INDEX "ApplicationPersistentStorage_applicationId_key";
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "exposePort" INTEGER;
|
||||
@@ -0,0 +1,12 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ServicePersistentStorage" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"path" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ServicePersistentStorage_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_path_key" ON "ServicePersistentStorage"("serviceId", "path");
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "dockerFileLocation" TEXT;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "denoMainFile" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "denoOptions" TEXT;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Build" ADD COLUMN "branch" TEXT;
|
||||
@@ -0,0 +1,17 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Umami" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"umamiAdminPassword" TEXT NOT NULL,
|
||||
"hashSalt" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Umami_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Umami_serviceId_key" ON "Umami"("serviceId");
|
||||
@@ -0,0 +1,22 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"proxyHash" TEXT,
|
||||
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "baseBuildImage" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "baseImage" TEXT;
|
||||
@@ -0,0 +1,16 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Hasura" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"graphQLAdminPassword" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Hasura_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Hasura_serviceId_key" ON "Hasura"("serviceId");
|
||||
@@ -0,0 +1,25 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Fider" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"jwtSecret" TEXT NOT NULL,
|
||||
"emailNoreply" TEXT,
|
||||
"emailMailgunApiKey" TEXT,
|
||||
"emailMailgunDomain" TEXT,
|
||||
"emailMailgunRegion" TEXT,
|
||||
"emailSmtpHost" TEXT,
|
||||
"emailSmtpPort" INTEGER,
|
||||
"emailSmtpUser" TEXT,
|
||||
"emailSmtpPassword" TEXT,
|
||||
"emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");
|
||||
@@ -0,0 +1,29 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Fider" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"jwtSecret" TEXT NOT NULL,
|
||||
"emailNoreply" TEXT,
|
||||
"emailMailgunApiKey" TEXT,
|
||||
"emailMailgunDomain" TEXT,
|
||||
"emailMailgunRegion" TEXT NOT NULL DEFAULT 'EU',
|
||||
"emailSmtpHost" TEXT,
|
||||
"emailSmtpPort" INTEGER,
|
||||
"emailSmtpUser" TEXT,
|
||||
"emailSmtpPassword" TEXT,
|
||||
"emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Fider" ("createdAt", "emailMailgunApiKey", "emailMailgunDomain", "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt") SELECT "createdAt", "emailMailgunApiKey", "emailMailgunDomain", coalesce("emailMailgunRegion", 'EU') AS "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt" FROM "Fider";
|
||||
DROP TABLE "Fider";
|
||||
ALTER TABLE "new_Fider" RENAME TO "Fider";
|
||||
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,23 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"proxyHash" TEXT,
|
||||
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Service" ADD COLUMN "exposePort" INTEGER;
|
||||
@@ -0,0 +1,24 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_PlausibleAnalytics" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"email" TEXT,
|
||||
"username" TEXT,
|
||||
"password" TEXT NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"secretKeyBase" TEXT,
|
||||
"scriptName" TEXT NOT NULL DEFAULT 'plausible.js',
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_PlausibleAnalytics" ("createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username") SELECT "createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username" FROM "PlausibleAnalytics";
|
||||
DROP TABLE "PlausibleAnalytics";
|
||||
ALTER TABLE "new_PlausibleAnalytics" RENAME TO "PlausibleAnalytics";
|
||||
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,32 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Wordpress" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"extraConfig" TEXT,
|
||||
"tablePrefix" TEXT,
|
||||
"ownMysql" BOOLEAN NOT NULL DEFAULT false,
|
||||
"mysqlHost" TEXT,
|
||||
"mysqlPort" INTEGER,
|
||||
"mysqlUser" TEXT NOT NULL,
|
||||
"mysqlPassword" TEXT NOT NULL,
|
||||
"mysqlRootUser" TEXT NOT NULL,
|
||||
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||
"mysqlDatabase" TEXT,
|
||||
"mysqlPublicPort" INTEGER,
|
||||
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"ftpUser" TEXT,
|
||||
"ftpPassword" TEXT,
|
||||
"ftpPublicPort" INTEGER,
|
||||
"ftpHostKey" TEXT,
|
||||
"ftpHostKeyPrivate" TEXT,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
|
||||
DROP TABLE "Wordpress";
|
||||
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
|
||||
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,24 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Setting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"fqdn" TEXT,
|
||||
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||
"proxyPassword" TEXT NOT NULL,
|
||||
"proxyUser" TEXT NOT NULL,
|
||||
"proxyHash" TEXT,
|
||||
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
|
||||
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL
|
||||
);
|
||||
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||
DROP TABLE "Setting";
|
||||
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Minio" ADD COLUMN "apiFqdn" TEXT;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "deploymentType" TEXT;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user