Mirror of https://github.com/ershisan99/coolify.git (synced 2025-12-18 12:33:06 +00:00)
Compare commits
973 Commits
(Commit table omitted: 973 rows, from f55b861849 down to 8a2de1001f. Only the SHA1 column survived the mirror extraction; the author, date, and message cells are empty.)
.devcontainer/devcontainer.json (file name not preserved by the mirror; identified from the contents)
@@ -8,7 +8,7 @@
 		// Append -bullseye or -buster to pin to an OS version.
 		// Use -bullseye variants on local arm64/Apple Silicon.
 		"args": {
-			"VARIANT": "16-bullseye"
+			"VARIANT": "18-bullseye"
 		}
 	},
 	// Set *default* container specific settings.json values on container create.
@@ -20,12 +20,13 @@
 		"svelte.svelte-vscode",
 		"ardenivanov.svelte-intellisense",
 		"Prisma.prisma",
-		"bradlc.vscode-tailwindcss"
+		"bradlc.vscode-tailwindcss",
+		"waderyan.gitblame"
 	],
 	// Use 'forwardPorts' to make a list of ports inside the container available locally.
-	"forwardPorts": [3000],
+	"forwardPorts": [3000, 3001],
 	// Use 'postCreateCommand' to run commands after the container is created.
-	"postCreateCommand": "cp .env.template .env && pnpm install && pnpm db:push && pnpm db:seed",
+	"postCreateCommand": "cp apps/api/.env.example apps/api/.env && pnpm install && pnpm db:push && pnpm db:seed",
 	// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
 	"remoteUser": "node",
 	"features": {
.dockerignore (file name not preserved by the mirror; contents match the repository's Docker ignore file)
@@ -1,5 +1,6 @@
 .DS_Store
 node_modules
+.pnpm-store
 build
 .svelte-kit
 package
@@ -9,4 +10,8 @@ package
 dist
 client
 apps/api/db/*.db
 local-serve
+apps/api/db/migration.db-journal
+apps/api/core*
+logs
+others/certificates
.github/workflows/fluent-bit-release.yml (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
name: fluent-bit-release

on:
  push:
    paths:
      - "others/fluentbit"
      - ".github/workflows/fluent-bit-release.yml"
    branches:
      - next

jobs:
  arm64:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: others/fluentbit/
          platforms: linux/arm64
          push: true
          tags: coollabsio/coolify-fluent-bit:1.0.0-arm64
  amd64:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: others/fluentbit/
          platforms: linux/amd64
          push: true
          tags: coollabsio/coolify-fluent-bit:1.0.0-amd64
  aarch64:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: others/fluentbit/
          platforms: linux/aarch64
          push: true
          tags: coollabsio/coolify-fluent-bit:1.0.0-aarch64
  merge-manifest:
    runs-on: ubuntu-latest
    needs: [amd64, arm64, aarch64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Create & publish manifest
        run: |
          docker manifest create coollabsio/coolify-fluent-bit:1.0.0 --amend coollabsio/coolify-fluent-bit:1.0.0-amd64 --amend coollabsio/coolify-fluent-bit:1.0.0-arm64 --amend coollabsio/coolify-fluent-bit:1.0.0-aarch64
          docker manifest push coollabsio/coolify-fluent-bit:1.0.0
.github/workflows/pocketbase-release.yml (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
name: pocketbase-release

on:
  push:
    paths:
      - "others/pocketbase"
      - ".github/workflows/pocketbase-release.yml"
    branches:
      - next

jobs:
  arm64:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: others/pocketbase/
          platforms: linux/arm64
          push: true
          tags: coollabsio/pocketbase:0.8.0-arm64
  amd64:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: others/pocketbase/
          platforms: linux/amd64
          push: true
          tags: coollabsio/pocketbase:0.8.0-amd64
  aarch64:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: others/pocketbase/
          platforms: linux/aarch64
          push: true
          tags: coollabsio/pocketbase:0.8.0-aarch64
  merge-manifest:
    runs-on: ubuntu-latest
    needs: [amd64, arm64, aarch64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Create & publish manifest
        run: |
          docker manifest create coollabsio/pocketbase:0.8.0 --amend coollabsio/pocketbase:0.8.0-amd64 --amend coollabsio/pocketbase:0.8.0-arm64 --amend coollabsio/pocketbase:0.8.0-aarch64
          docker manifest push coollabsio/pocketbase:0.8.0
.github/workflows/production-release.yml (vendored, 92 lines changed)
@@ -2,14 +2,14 @@ name: production-release
 
 on:
   release:
-    types: [published]
+    types: [released]
 
 jobs:
-  making-something-cool:
-    runs-on: ubuntu-latest
+  arm64:
+    runs-on: [self-hosted, arm64]
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v1
       - name: Set up Docker Buildx
@@ -26,12 +26,86 @@ jobs:
         uses: docker/build-push-action@v2
         with:
           context: .
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/arm64
           push: true
-          tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
+          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
+  amd64:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          platforms: linux/amd64
+          push: true
+          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
+  aarch64:
+    runs-on: [self-hosted, arm64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          platforms: linux/aarch64
+          push: true
+          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max
+  merge-manifest:
+    runs-on: ubuntu-latest
+    needs: [amd64, arm64, aarch64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
+      - name: Create & publish manifest
+        run: |
+          docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
+          docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
       - uses: sarisia/actions-status-discord@v1
         if: always()
         with:
-          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_CHANNEL }}
+          webhook: ${{ secrets.DISCORD_WEBHOOK_PROD_RELEASE_CHANNEL }}
.github/workflows/release-candidate.yml (vendored, new file, 90 lines)
@@ -0,0 +1,90 @@
name: release-candidate
on:
  release:
    types: [prereleased]

jobs:
  arm64-making-something-cool:
    runs-on: [self-hosted, arm64]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: "next"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Get current package version
        uses: martinbeentjes/npm-get-version-action@v1.2.3
        id: package-version
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          platforms: linux/arm64
          push: true
          tags: coollabsio/coolify:${{github.event.release.name}}-arm64
          cache-from: type=registry,ref=coollabsio/coolify:buildcache-rc-arm64
          cache-to: type=registry,ref=coollabsio/coolify:buildcache-rc-arm64,mode=max
      - uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
  amd64-making-something-cool:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: "next"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Get current package version
        uses: martinbeentjes/npm-get-version-action@v1.2.3
        id: package-version
      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: .
          platforms: linux/amd64
          push: true
          tags: coollabsio/coolify:${{github.event.release.name}}-amd64
          cache-from: type=registry,ref=coollabsio/coolify:buildcache-rc-amd64
          cache-to: type=registry,ref=coollabsio/coolify:buildcache-rc-amd64,mode=max
      - uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
  merge-manifest-to-be-cool:
    runs-on: ubuntu-latest
    needs: [arm64-making-something-cool, amd64-making-something-cool]
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Create & publish manifest
        run: |
          docker manifest create coollabsio/coolify:${{github.event.release.name}} --amend coollabsio/coolify:${{github.event.release.name}}-amd64 --amend coollabsio/coolify:${{github.event.release.name}}-arm64
          docker manifest push coollabsio/coolify:${{github.event.release.name}}
.github/workflows/staging-release.yml (vendored, 72 lines changed)
@@ -2,15 +2,23 @@ name: staging-release
 
 on:
   push:
+    paths:
+      - "**"
+      - "!others/fluentbit"
+      - "!others/pocketbase"
+      - "!.github/workflows/fluent-bit-release.yml"
+      - "!.github/workflows/pocketbase-release.yml"
     branches:
       - next
 
 jobs:
-  staging-release:
-    runs-on: ubuntu-latest
+  arm64:
+    runs-on: [self-hosted, arm64]
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
+        with:
+          ref: "next"
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v1
       - name: Set up Docker Buildx
@@ -20,16 +28,66 @@
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Get current package version
         uses: martinbeentjes/npm-get-version-action@v1.2.3
         id: package-version
       - name: Build and push
         uses: docker/build-push-action@v2
         with:
           context: .
+          platforms: linux/arm64
           push: true
-          tags: coollabsio/coolify:next
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next,mode=max
+          tags: coollabsio/coolify:next-arm64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
+  amd64:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          ref: "next"
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          platforms: linux/amd64
+          push: true
+          tags: coollabsio/coolify:next-amd64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
+  merge-manifest:
+    runs-on: ubuntu-latest
+    needs: [arm64, amd64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Create & publish manifest
+        run: |
+          docker manifest create coollabsio/coolify:next --amend coollabsio/coolify:next-amd64 --amend coollabsio/coolify:next-arm64
+          docker manifest push coollabsio/coolify:next
       - uses: sarisia/actions-status-discord@v1
         if: always()
         with:
-          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_CHANNEL }}
+          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
.gitignore (vendored, 11 lines changed)
@@ -1,5 +1,6 @@
 .DS_Store
 node_modules
+.pnpm-store
 build
 .svelte-kit
 package
@@ -9,5 +10,11 @@ package
 dist
 client
 apps/api/db/*.db
-local-serve
 apps/api/db/migration.db-journal
+apps/api/core*
+apps/backup/backups/*
+!apps/backup/backups/.gitkeep
+logs
+others/certificates
+backups/*
+!backups/.gitkeep
.gitpod.Dockerfile (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
FROM gitpod/workspace-full:2022-08-17-18-37-55
RUN brew install buildpacks/tap/pack
.gitpod.yml (file name not preserved by the mirror; identified from the contents)
@@ -1,10 +1,11 @@
 # This configuration file was automatically generated by Gitpod.
 # Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file)
 # and commit this file to your remote git repository to share the goodness with others.
-image: gitpod/workspace-node:2022-06-20-19-54-55
-tasks:
-  - init: pnpm install && pnpm db:push && pnpm db:seed
-    command: pnpm dev
+#image:
+#  file: .gitpod.Dockerfile
+#tasks:
+#  - init: pnpm install && pnpm db:push && pnpm db:seed
+#    command: pnpm dev
 
 ports:
   - port: 3001
.vscode/settings.json (vendored, 21 lines changed)
@@ -1,11 +1,22 @@
 {
-  "i18n-ally.localesPaths": ["src/lib/locales"],
+  "i18n-ally.localesPaths": [
+    "src/lib/locales"
+  ],
   "i18n-ally.keystyle": "nested",
   "i18n-ally.extract.ignoredByFiles": {
-    "src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
+    "src\\routes\\__layout.svelte": [
+      "Coolify",
+      "coolLabs logo"
+    ]
   },
   "i18n-ally.sourceLanguage": "en",
-  "i18n-ally.enabledFrameworks": ["svelte"],
-  "i18n-ally.enabledParsers": ["js", "ts", "json"],
+  "i18n-ally.enabledFrameworks": [
+    "svelte"
+  ],
+  "i18n-ally.enabledParsers": [
+    "js",
+    "ts",
+    "json"
+  ],
   "i18n-ally.extract.autoDetect": true
 }
CODE_OF_CONDUCT.md (new file, 94 lines)
@@ -0,0 +1,94 @@
# Citizen Code of Conduct

## 1. Purpose

A primary goal of Coolify is to be inclusive to the largest number of contributors, with the most varied and diverse backgrounds possible. As such, we are committed to providing a friendly, safe and welcoming environment for all, regardless of gender, sexual orientation, ability, ethnicity, socioeconomic status, and religion (or lack thereof).

This code of conduct outlines our expectations for all those who participate in our community, as well as the consequences for unacceptable behavior.

We invite all those who participate in Coolify to help us create safe and positive experiences for everyone.

## 2. Open [Source/Culture/Tech] Citizenship

A supplemental goal of this Code of Conduct is to increase open [source/culture/tech] citizenship by encouraging participants to recognize and strengthen the relationships between our actions and their effects on our community.

Communities mirror the societies in which they exist and positive action is essential to counteract the many forms of inequality and abuses of power that exist in society.

If you see someone who is making an extra effort to ensure our community is welcoming, friendly, and encourages all participants to contribute to the fullest extent, we want to know.

## 3. Expected Behavior

The following behaviors are expected and requested of all community members:

* Participate in an authentic and active way. In doing so, you contribute to the health and longevity of this community.
* Exercise consideration and respect in your speech and actions.
* Attempt collaboration before conflict.
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
* Be mindful of your surroundings and of your fellow participants. Alert community leaders if you notice a dangerous situation, someone in distress, or violations of this Code of Conduct, even if they seem inconsequential.
* Remember that community event venues may be shared with members of the public; please be respectful to all patrons of these locations.

## 4. Unacceptable Behavior

The following behaviors are considered harassment and are unacceptable within our community:

* Violence, threats of violence or violent language directed against another person.
* Sexist, racist, homophobic, transphobic, ableist or otherwise discriminatory jokes and language.
* Posting or displaying sexually explicit or violent material.
* Posting or threatening to post other people's personally identifying information ("doxing").
* Personal insults, particularly those related to gender, sexual orientation, race, religion, or disability.
* Inappropriate photography or recording.
* Inappropriate physical contact. You should have someone's consent before touching them.
* Unwelcome sexual attention. This includes, sexualized comments or jokes; inappropriate touching, groping, and unwelcomed sexual advances.
* Deliberate intimidation, stalking or following (online or in person).
* Advocating for, or encouraging, any of the above behavior.
* Sustained disruption of community events, including talks and presentations.

## 5. Weapons Policy

No weapons will be allowed at Coolify events, community spaces, or in other spaces covered by the scope of this Code of Conduct. Weapons include but are not limited to guns, explosives (including fireworks), and large knives such as those used for hunting or display, as well as any other item used for the purpose of causing injury or harm to others. Anyone seen in possession of one of these items will be asked to leave immediately, and will only be allowed to return without the weapon. Community members are further expected to comply with all state and local laws on this matter.

## 6. Consequences of Unacceptable Behavior

Unacceptable behavior from any community member, including sponsors and those with decision-making authority, will not be tolerated.

Anyone asked to stop unacceptable behavior is expected to comply immediately.

If a community member engages in unacceptable behavior, the community organizers may take any action they deem appropriate, up to and including a temporary ban or permanent expulsion from the community without warning (and without refund in the case of a paid event).

## 7. Reporting Guidelines

If you are subject to or witness unacceptable behavior, or have any other concerns, please notify a community organizer as soon as possible. hi@coollabs.io.

Additionally, community organizers are available to help community members engage with local law enforcement or to otherwise help those experiencing unacceptable behavior feel safe. In the context of in-person events, organizers will also provide escorts as desired by the person experiencing distress.

## 8. Addressing Grievances

If you feel you have been falsely or unfairly accused of violating this Code of Conduct, you should notify coollabsio with a concise description of your grievance. Your grievance will be handled in accordance with our existing governing policies.

## 9. Scope

We expect all community participants (contributors, paid or otherwise; sponsors; and other guests) to abide by this Code of Conduct in all community venues--online and in-person--as well as in all one-on-one communications pertaining to community business.

This code of conduct and its related procedures also applies to unacceptable behavior occurring outside the scope of community activities when such behavior has the potential to adversely affect the safety and well-being of community members.

## 10. Contact info

hi@coollabs.io

## 11. License and attribution

The Citizen Code of Conduct is distributed by [Stumptown Syndicate](http://stumptownsyndicate.org) under a [Creative Commons Attribution-ShareAlike license](http://creativecommons.org/licenses/by-sa/3.0/).

Portions of text derived from the [Django Code of Conduct](https://www.djangoproject.com/conduct/) and the [Geek Feminism Anti-Harassment Policy](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy).

_Revision 2.3. Posted 6 March 2017._

_Revision 2.2. Posted 4 February 2016._

_Revision 2.1. Posted 23 June 2014._

_Revision 2.0, adopted by the [Stumptown Syndicate](http://stumptownsyndicate.org) board on 10 January 2013. Posted 17 March 2013._
CONTRIBUTING.md (deleted, 281 lines; the removed content follows)
@@ -1,281 +0,0 @@
# 👋 Welcome

First of all, thank you for considering contributing to my project! It means a lot 💜.

This contribution guide is for v2 and is not applicable to v3.

## 🙋 Want to help?

If you are new to GitHub contributions, have a look at [first contributions](https://github.com/firstcontributions/first-contributions) and follow that guide.

Follow the [introduction](#introduction) to get started, then start contributing!

This is a short list of what you can do to help the project:

- [🧑‍💻 Develop your own ideas](#developer-contribution)
- [🌐 Translate the project](#translation)

## 👋 Introduction

### Setup with GitHub Codespaces

If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to run the dev environment. All the required dependencies and packages have been configured for you already.

### Setup locally on your machine

> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. 💡 Windows users can use GitHub Codespaces for development.

#### Recommended Pull Request Guideline

- Fork the project
- Clone your fork repo to local
- Create a new branch
- Push to your fork repo
- Create a pull request: https://github.com/coollabsio/compare
- Write a proper description
- Open the pull request to review against the `next` branch

---

# How to start after you set up your local fork?

Due to the lock file, this repository works best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!

You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.

#### Steps for local setup

1. Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
2. Install dependencies with `pnpm install`.
3. Create a local SQLite database with `pnpm db:push`.

   This will apply all migrations at `db/dev.db`.

4. Seed the database with base entities with `pnpm db:seed`.
5. You can start coding after starting `pnpm dev`.

## 🧑‍💻 Developer contribution

### Technical skills required

- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: Svelte / SvelteKit
- **Database ORM**: Prisma.io
- **Docker Engine**

### Database migrations

During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.

If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>`, where `nameOfMigration` is given by you. Make it descriptive. :)

### Tricky parts

- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!

---

# How to add new services

You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:

- Self-hostable (obviously)
- Open-source
- Maintained (I do not want to add software full of bugs)

## Backend

There are 5 steps you should take on the backend side.

1. Create the Prisma / database schema for the new service.
2. Add supported versions of the service.
3. Update global functions.
4. Create API endpoints.
5. Define automatically generated variables.

> I will use [Umami](https://umami.is/) as an example service.

### Create the Prisma / database schema for the new service

You only need to do this if you store passwords or any persistent configuration. It is required by most services, but there are some exceptions, like NocoDB.

Update the Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).

- Add a new model with the new service name.
- Make a relationship with the `Service` model.
- In the `Service` model, the name of the new field should be lower-case.
- If the service needs a database, define a `publicPort` field to be able to make its database public; an example field name in the case of PostgreSQL is `postgresqlPublicPort`. It should be an optional field.

If you are finished with the Prisma schema, you should update the database schema with the `pnpm db:push` command.

> You must restart the running development environment to be able to use the new model.

> If you use VSCode, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running VSCode.

### Add supported versions

Supported versions are hardcoded into Coolify (for now).

You need to update the `supportedServiceTypesAndVersions` function at [src/lib/components/common.ts](src/lib/components/common.ts). Example JSON:

```js
{
	// Name used to identify the service internally
	name: 'umami',
	// Fancier name to show to the user
	fancyName: 'Umami',
	// Docker base image for the service
	baseImage: 'ghcr.io/mikecao/umami',
	// Optional: If there is any dependent image, you should list it here
	images: [],
	// Usable tags
	versions: ['postgresql-latest'],
	// Which tag is the recommended
	recommendedVersion: 'postgresql-latest',
	// Application's default port, Umami listens on 3000
	ports: {
		main: 3000
	}
}
```

### Update global functions

1. Add the new service to the `include` variable in [src/lib/database/services.ts](src/lib/database/services.ts), so it will be included in all places in the database queries where it is required.

```js
const include: Prisma.ServiceInclude = {
	destinationDocker: true,
	persistentStorage: true,
	serviceSecret: true,
	minio: true,
	plausibleAnalytics: true,
	vscodeserver: true,
	wordpress: true,
	ghost: true,
	meiliSearch: true,
	umami: true // This line!
};
```

2. Update the database update query with the new service type in the `configureServiceType` function in [src/lib/database/services.ts](src/lib/database/services.ts). This function defines the automatically generated variables (passwords, users, etc.) and their encryption process (if applicable).

```js
[...]
else if (type === 'umami') {
	const postgresqlUser = cuid();
	const postgresqlPassword = encrypt(generatePassword());
	const postgresqlDatabase = 'umami';
	const hashSalt = encrypt(generatePassword(64));
	await prisma.service.update({
		where: { id },
		data: {
			type,
			umami: {
				create: {
					postgresqlDatabase,
					postgresqlPassword,
					postgresqlUser,
					hashSalt,
				}
			}
		}
	});
}
```

3. Add the decryption process for configurations and passwords to the `getService` function in [src/lib/database/services.ts](src/lib/database/services.ts).

```js
if (body.umami?.postgresqlPassword)
	body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);

if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
```

4. Add the service deletion query to the `removeService` function in [src/lib/database/services.ts](src/lib/database/services.ts).
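For Umami, this deletion is a single Prisma call. A minimal sketch of the kind of statement to add (the `serviceId` relation field name is an assumption based on the schema step above, not code copied from the repository):

```js
// Inside removeService() in src/lib/database/services.ts, next to the
// other per-service cleanup queries. `serviceId` is an assumed field name.
await prisma.umami.deleteMany({ where: { serviceId: id } });
```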
|
||||
### Create API endpoints.
|
||||
|
||||
You need to add a new folder under [src/routes/services/[id]](src/routes/services/[id]) with the low-capital name of the service. You need 3 default files in that folder.
|
||||
|
||||
#### `index.json.ts`:
|
||||
|
||||
It has a POST endpoint that updates the service details in Coolify's database, such as name, url, other configurations, like passwords. It should look something like this:
|
||||
|
||||
```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
	const { status, body } = await getUserDetails(event);
	if (status === 401) return { status, body };

	const { id } = event.params;

	let { name, fqdn } = await event.request.json();
	if (fqdn) fqdn = fqdn.toLowerCase();

	try {
		await db.updateService({ id, fqdn, name });
		return { status: 201 };
	} catch (error) {
		return ErrorHandler(error);
	}
};
```

If necessary, you can create your own database update function, specific to the new service; a sketch follows.

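A sketch of what such a function could look like, following the `prisma.service.update` pattern shown earlier (the name `updateUmamiService` is illustrative and not part of the codebase):

```js
// Hypothetical service-specific update helper.
export async function updateUmamiService({ id, fqdn, name }) {
	return await prisma.service.update({
		where: { id },
		data: { fqdn, name }
	});
}
```
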
#### `start.json.ts`

It has a POST endpoint that sets up all the required secrets, persistent volumes, and the `docker-compose.yaml` file, and sends a request to the specified Docker engine.

You can also define an `HTTP` or `TCP` proxy for every additional port that should be proxied to your server (see the `startHttpProxy` and `startTcpProxy` functions in [src/lib/haproxy/index.ts](src/lib/haproxy/index.ts)).

#### `stop.json.ts`

It has a POST endpoint that stops the service and all dependent containers (TCP/HTTP proxies). If a `publicPort` is specified, it also needs to be cleaned up from the database; a sketch is shown below.

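A minimal sketch of such an endpoint, reusing the structure of the `index.json.ts` example above. The helper `stopUmamiService` is an assumption for illustration only, not the actual implementation:

```js
import { getUserDetails } from '$lib/common';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
	const { status, body } = await getUserDetails(event);
	if (status === 401) return { status, body };

	const { id } = event.params;
	try {
		// Hypothetical helper that stops the service and its proxy containers,
		// then clears the publicPort from the database so it can be reused.
		await stopUmamiService(id);
		return { status: 200 };
	} catch (error) {
		return ErrorHandler(error);
	}
};
```
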
## Frontend

1. You need to add a custom logo at [src/lib/components/svg/services/](src/lib/components/svg/services/) as a Svelte component.

SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.

2. You need to include the logo at

- [src/routes/services/index.svelte](src/routes/services/index.svelte) with `isAbsolute` in two places,
- [src/lib/components/ServiceLinks.svelte](src/lib/components/ServiceLinks.svelte) with `isAbsolute` and a link to the docs/main site of the service,
- [src/routes/services/[id]/configuration/type.svelte](src/routes/services/[id]/configuration/type.svelte) with `isAbsolute`.

3. By default, the URL and name frontend forms are included in [src/routes/services/[id]/\_Services/\_Services.svelte](src/routes/services/[id]/_Services/_Services.svelte).

If you need to show more details on the frontend, such as users/passwords, you need to add a Svelte component to [src/routes/services/[id]/\_Services](src/routes/services/[id]/_Services) whose filename starts with an underscore. For examples, see the other files in that folder.

You also need to add the new inputs to the `index.json.ts` file of the specific service, as is done for MinIO here: [src/routes/services/[id]/minio/index.json.ts](src/routes/services/[id]/minio/index.json.ts).

## 🌐 Translate the project

The project uses [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) for translations.
Languages are named following [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) codes.

### Installation

You must have gone through all the [intro](#introduction) steps before you can start translating.

This is only a suggestion, but I recommend using:

- Visual Studio Code
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal for tracking the progress of the translation.
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get syntax highlighting for the project.

### Adding a language

If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the steps below:

1. In `src/lib/locales/`, copy-paste `en.json` and rename it with your language code (e.g. `cz.json`).
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO code of your language": "Language",` (e.g. `"cz": "Czech",`; see the example below).
3. Have fun translating!

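For illustration, `lang.json` might then look like this (the surrounding entries are assumed):

```json
{
	"cz": "Czech",
	"en": "English",
	"fr": "French"
}
```
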

CONTRIBUTION.md (new file)
@@ -0,0 +1,48 @@
# Contributing

> "First, thanks for considering contributing to my project.
> It really means a lot! 😁" - [@andrasbacsai](https://github.com/andrasbacsai)

You can ask for guidance anytime on our
[Discord server](https://coollabs.io/discord) in the `#contribution` channel.

You'll need a set of skills to [get started](docs/contribution/GettingStarted.md).

## 1) Setup your development environment

- 🌟 [Container based](docs/dev_setup/Container.md) ← *Recommended*
- 📦 [DockerContainer](docs/dev_setup/DockerContiner.md) *WIP*
- 🐙 [GitHub Codespaces](docs/dev_setup/GithubCodespaces.md)
- ☁️ [GitPod](docs/dev_setup/GitPod.md)
- 🍏 [Local Mac](docs/dev_setup/Mac.md)

## 2) Basic requirements

- [Install Pnpm](https://pnpm.io/installation)
- [Install Docker Engine](https://docs.docker.com/engine/install/)
- [Setup Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/)
- [Setup GIT LFS Support](https://git-lfs.github.com/)

## 3) Setup Coolify

- Copy `apps/api/.env.example` to `apps/api/.env`.
- Edit `apps/api/.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- Run `pnpm install` to install dependencies.
- Run `pnpm db:push` to create a local SQLite database. This will apply all migrations at `db/dev.db`.
- Run `pnpm db:seed` to seed the database.
- Run `pnpm dev` to start coding.

```sh
# Or... copy and paste the commands below:
cp apps/api/.env.example apps/api/.env
pnpm install
pnpm db:push
pnpm db:seed
pnpm dev
```

## 4) Start Coding

You should be able to access `http://localhost:3000`.

1. Click `Register` and set up your first user.

Dockerfile
@@ -1,47 +1,53 @@
FROM node:18-alpine3.16 as build
ARG PNPM_VERSION=7.11.0

FROM node:18-slim as build
WORKDIR /app

RUN apk add --no-cache curl
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
RUN apt update && apt -y install curl
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}

COPY . .
RUN pnpm install
RUN pnpm build

# Production build
FROM node:18-alpine3.16
FROM node:18-slim
WORKDIR /app
ENV NODE_ENV production
ARG TARGETPLATFORM

ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
    PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
    PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
    PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
    PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
    PRISMA_CLIENT_ENGINE_TYPE=binary
# https://download.docker.com/linux/static/stable/
ARG DOCKER_VERSION=20.10.18
# https://github.com/docker/compose/releases
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=v0.27.0

COPY --from=coollabsio/prisma-engine:3.15 /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/

RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl psmisc
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${PNPM_VERSION}

RUN mkdir -p ~/.docker/cli-plugins/
# https://download.docker.com/linux/static/stable/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
# https://github.com/docker/compose/releases
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.7.0 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker

RUN (curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.27.0/pack-v0.27.0-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack)
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-$DOCKER_COMPOSE_VERSION -o ~/.docker/cli-plugins/docker-compose
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /usr/local/bin/pack

RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack

COPY --from=build /app/apps/api/build/ .
COPY --from=build /app/others/fluentbit/ ./fluentbit
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .
COPY --from=build /app/docker-compose.yaml .
COPY --from=build /app/apps/api/tags.json .
COPY --from=build /app/apps/api/templates.json .

RUN pnpm install -p

EXPOSE 3000
ENV CHECKPOINT_DISABLE=1
CMD pnpm start

Dockerfile-dev (new file)
@@ -0,0 +1,31 @@
FROM node:18-slim
ENV NODE_ENV development
ARG TARGETPLATFORM
ARG PNPM_VERSION=7.11.0
ARG NPM_VERSION=8.19.1
# https://download.docker.com/linux/static/stable/
ARG DOCKER_VERSION=20.10.18
# https://github.com/docker/compose/releases
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=v0.27.0
WORKDIR /app

RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}

RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${NPM_VERSION}

RUN mkdir -p ~/.docker/cli-plugins/

RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-$DOCKER_COMPOSE_VERSION -o ~/.docker/cli-plugins/docker-compose
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /usr/local/bin/pack

RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack

EXPOSE 3000
ENV CHECKPOINT_DISABLE=1

README.md
@@ -1,9 +1,132 @@
# Coolify

An open-source & self-hostable Heroku / Netlify alternative
(ARM support is in beta).
An open-source & self-hostable Heroku / Netlify alternative.

## Financial Contributors
## Live Demo

https://demo.coolify.io/

(If it is unresponsive, that means someone overloaded the server. 😄)

## Feedback

If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

---

## How to install

For more details, go to the [docs](https://docs.coollabs.io/coolify/installation).

Installation is automated with the following command:

```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
```

If you would like no questions during installation:

```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
```

---

## Features

### Git Sources

Self-hosted versions are also supported!

<a href="https://github.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/github.com"></a>
<a href="https://gitlab.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/gitlab.com"></a>

### Destinations

Deploy your resources to:

- Local Docker Engine
- Remote Docker Engine

### Applications

<a href="https://heroku.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/heroku.com"></a>
<a href="https://html5.org/">
<svg style="width:40px;height:40px" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg" ><g clip-path="url(#HTML5_Clip0_4)" ><path d="M30.216 0L27.6454 28.7967L16.0907 32L4.56783 28.8012L2 0H30.216Z" fill="#E44D26" /><path d="M16.108 29.5515L25.4447 26.963L27.6415 2.35497H16.108V29.5515Z" fill="#F16529" /><path d="M11.1109 9.4197H16.108V5.88731H7.25053L7.33509 6.83499L8.20327 16.5692H16.108V13.0369H11.4338L11.1109 9.4197Z" fill="#EBEBEB" /><path d="M11.907 18.3354H8.36111L8.856 23.8818L16.0917 25.8904L16.108 25.8859V22.2108L16.0925 22.2149L12.1585 21.1527L11.907 18.3354Z" fill="#EBEBEB" /><path d="M16.0958 16.5692H20.4455L20.0354 21.1504L16.0958 22.2138V25.8887L23.3373 23.8817L23.3904 23.285L24.2205 13.9855L24.3067 13.0369H16.0958V16.5692Z" fill="white" /><path d="M16.0958 9.41105V9.41969H24.6281L24.6989 8.62572L24.8599 6.83499L24.9444 5.88731H16.0958V9.41105Z" fill="white" /></g><defs><clipPath id="HTML5_Clip0_4"><rect width="32" height="32" fill="white" /></clipPath></defs></svg></a>
<a href="https://nodejs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/nodejs.org"></a>
<a href="https://vuejs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/vuejs.org"></a>
<a href="https://nuxtjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/nuxtjs.org"></a>
<a href="https://nextjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/nextjs.org"></a>
<a href="https://reactjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/reactjs.org"></a>
<a href="https://preactjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/preactjs.org"></a>
<a href="https://gatsbyjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/gatsbyjs.org"></a>
<a href="https://svelte.dev"><img style="width:40px;height:40px" src="https://icon.horse/icon/svelte.dev"></a>
<a href="https://php.net"><img style="width:40px;height:40px" src="https://icon.horse/icon/php.net"></a>
<a href="https://laravel.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/laravel.com"></a>
<a href="https://python.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/python.org"></a>
<a href="https://deno.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/deno.com"></a>
<a href="https://docker.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/docker.com"></a>

### Databases

<a href="https://mongodb.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/mongodb.org"></a>
<a href="https://mariadb.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/mariadb.org"></a>
<a href="https://mysql.com"><svg style="width:40px;height:40px" xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 25.6 25.6" ><path d="M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z" transform="matrix(.390229 0 0 .38781 -46.300037 -16.856717)" fill-rule="evenodd" fill="#00678c" /></svg></a>
<a href="https://postgresql.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/postgresql.org"></a>
<a href="https://couchdb.apache.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/couchdb.apache.org"></a>
<a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a>

### Services

- [Appwrite](https://appwrite.io)
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
- [LanguageTool](https://languagetool.org)
- [n8n](https://n8n.io)
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
- [Umami](https://github.com/mikecao/umami)
- [Fider](https://fider.io)
- [Hasura](https://hasura.io)
- [GlitchTip](https://glitchtip.com)
- And more...

## Support

- Mastodon: [@andrasbacsai@fosstodon.org](https://fosstodon.org/@andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://coollabs.io/discord)

---

## ⚗️ Expertise Contributions

Coolify is developed under the [Apache License](./LICENSE) and you can help to make it grow.
Our community will be glad to have you on board!

Learn how to contribute to Coolify as a ...

→ [👩🏾💻 Software developer](./CONTRIBUTION.md)

→ [🧑🏻🏫 Translator](./docs/contribution/Translating.md)

<!--
→ 🧑🏽🎨 Designer
→ 🙋♀️ Community Manager
→ 🧙🏻♂️ Text Content Creator
→ 👨🏼🎤 Video Content Creator
-->

---

## 💰 Financial Contributors

Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/coollabsio/contribute)]

@@ -24,127 +147,4 @@ Support this project with your organization. Your logo will show up here with a
<a href="https://opencollective.com/coollabsio/organization/6/website"><img src="https://opencollective.com/coollabsio/organization/6/avatar.svg"></a>
<a href="https://opencollective.com/coollabsio/organization/7/website"><img src="https://opencollective.com/coollabsio/organization/7/avatar.svg"></a>
<a href="https://opencollective.com/coollabsio/organization/8/website"><img src="https://opencollective.com/coollabsio/organization/8/avatar.svg"></a>
<a href="https://opencollective.com/coollabsio/organization/9/website"><img src="https://opencollective.com/coollabsio/organization/9/avatar.svg"></a>

---

## Live Demo

https://demo.coolify.io/

(If it is unresponsive, that means someone overloaded the server. 😄)

## Feedback

If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

---

## How to install

Installation is automated with the following command:

```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
```

If you would like no questions during installation:

```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
```

For more details, go to the [docs](https://docs.coollabs.io/coolify/installation).

---

## Features

### Git Sources

You can use the following Git Sources for auto-deployment to your Coolify instance! (Self-hosted versions are also supported.)

<a href="https://github.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/github.com"></a>
<a href="https://gitlab.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/gitlab.com"></a>

### Destinations

You can deploy your applications to the following destinations:

- Local Docker Engine
- Remote Docker Engine

### Applications

Predefined build packs cover the basic needs for deploying applications.

If you have an advanced use case, you can use the Docker build pack, which allows you to deploy your application based on your custom Dockerfile.

<a href="https://html5.org/">
<svg style="width:40px;height:40px" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg" ><g clip-path="url(#HTML5_Clip0_4)" ><path d="M30.216 0L27.6454 28.7967L16.0907 32L4.56783 28.8012L2 0H30.216Z" fill="#E44D26" /><path d="M16.108 29.5515L25.4447 26.963L27.6415 2.35497H16.108V29.5515Z" fill="#F16529" /><path d="M11.1109 9.4197H16.108V5.88731H7.25053L7.33509 6.83499L8.20327 16.5692H16.108V13.0369H11.4338L11.1109 9.4197Z" fill="#EBEBEB" /><path d="M11.907 18.3354H8.36111L8.856 23.8818L16.0917 25.8904L16.108 25.8859V22.2108L16.0925 22.2149L12.1585 21.1527L11.907 18.3354Z" fill="#EBEBEB" /><path d="M16.0958 16.5692H20.4455L20.0354 21.1504L16.0958 22.2138V25.8887L23.3373 23.8817L23.3904 23.285L24.2205 13.9855L24.3067 13.0369H16.0958V16.5692Z" fill="white" /><path d="M16.0958 9.41105V9.41969H24.6281L24.6989 8.62572L24.8599 6.83499L24.9444 5.88731H16.0958V9.41105Z" fill="white" /></g><defs><clipPath id="HTML5_Clip0_4"><rect width="32" height="32" fill="white" /></clipPath></defs></svg></a>
<a href="https://nodejs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/nodejs.org"></a>
<a href="https://vuejs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/vuejs.org"></a>
<a href="https://nuxtjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/nuxtjs.org"></a>
<a href="https://nextjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/nextjs.org"></a>
<a href="https://reactjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/reactjs.org"></a>
<a href="https://preactjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/preactjs.org"></a>
<a href="https://gatsbyjs.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/gatsbyjs.org"></a>
<a href="https://svelte.dev"><img style="width:40px;height:40px" src="https://icon.horse/icon/svelte.dev"></a>
<a href="https://php.net"><img style="width:40px;height:40px" src="https://icon.horse/icon/php.net"></a>
<a href="https://laravel.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/laravel.com"></a>
<a href="https://python.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/python.org"></a>
<a href="https://deno.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/deno.com"></a>
<a href="https://docker.com"><img style="width:40px;height:40px" src="https://icon.horse/icon/docker.com"></a>

If you have a new build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

### Databases

One-click databases, ready to be used internally or shared over the internet:

<a href="https://mongodb.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/mongodb.org"></a>
<a href="https://mariadb.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/mariadb.org"></a>
<a href="https://mysql.com"><svg style="width:40px;height:40px" xmlns="http://www.w3.org/2000/svg" width="64" height="64" viewBox="0 0 25.6 25.6" ><path d="M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z" transform="matrix(.390229 0 0 .38781 -46.300037 -16.856717)" fill-rule="evenodd" fill="#00678c" /></svg></a>
<a href="https://postgresql.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/postgresql.org"></a>
<a href="https://couchdb.apache.org"><img style="width:40px;height:40px" src="https://icon.horse/icon/couchdb.apache.org"></a>
<a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a>

If you have a new database you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

### Services

Do you quickly need to host a self-hostable, open-source service? You can do it with a few clicks!

- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
- [VaultWarden](https://github.com/dani-garcia/vaultwarden)
- [LanguageTool](https://languagetool.org)
- [n8n](https://n8n.io)
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
- [Umami](https://github.com/mikecao/umami)
- [Fider](https://fider.io)
- [Hasura](https://hasura.io)

If you have a new service you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!

## Migration from v1

A fresh installation is necessary. v2 and v3 are not compatible with v1.

## Support

- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://discord.gg/xhBCC7eGKw)

## License

This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Please see the [LICENSE](/LICENSE) file in our repository for the full text.
<a href="https://opencollective.com/coollabsio/organization/9/website"><img src="https://opencollective.com/coollabsio/organization/9/avatar.svg"></a>

@@ -1,9 +1,9 @@
COOLIFY_APP_ID=
COOLIFY_SECRET_KEY="12341234123412341234123412341234 - 32 long"
COOLIFY_DATABASE_URL=
COOLIFY_SENTRY_DSN=

COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=
COOLIFY_APP_ID=local-dev
# 32 bits long secret key
COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db

COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=

apps/api/devTags.json (new file)
File diff suppressed because one or more lines are too long

apps/api/devTemplates.yaml (new file)
File diff suppressed because it is too large

@@ -1,7 +1,11 @@
{
	"watch": ["src"],
	"ignore": ["src/**/*.test.ts"],
	"ext": "ts,mjs,json,graphql",
	"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build",
	"legacyWatch": true
}
	"watch": [
		"src"
	],
	"ignore": [
		"src/**/*.test.ts"
	],
	"ext": "ts,mjs,json,graphql",
	"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --platform=node --outdir=build --format=cjs && node build",
	"legacyWatch": true
}

@@ -3,6 +3,7 @@
	"description": "Coolify's Fastify API",
	"license": "Apache-2.0",
	"scripts": {
		"db:generate": "prisma generate",
		"db:push": "prisma db push && prisma generate",
		"db:seed": "prisma db seed",
		"db:studio": "prisma studio",
@@ -11,59 +12,72 @@
		"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
		"format": "prettier --write 'src/**/*.{js,ts,json,md}'",
		"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
		"start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js"
		"start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js"
	},
	"dependencies": {
		"@breejs/ts-worker": "2.0.0",
		"@fastify/autoload": "5.2.0",
		"@fastify/cookie": "7.3.1",
		"@fastify/cors": "8.1.0",
		"@fastify/autoload": "5.5.0",
		"@fastify/cookie": "8.3.0",
		"@fastify/cors": "8.2.0",
		"@fastify/env": "4.1.0",
		"@fastify/jwt": "6.3.2",
		"@fastify/static": "6.5.0",
		"@fastify/jwt": "6.3.3",
		"@fastify/multipart": "7.3.0",
		"@fastify/static": "6.5.1",
		"@iarna/toml": "2.2.5",
		"@prisma/client": "3.15.2",
		"axios": "0.27.2",
		"@ladjs/graceful": "3.0.2",
		"@prisma/client": "4.6.1",
		"@sentry/node": "7.21.1",
		"@sentry/tracing": "7.21.1",
		"axe": "11.0.0",
		"bcryptjs": "2.4.3",
		"bree": "9.1.2",
		"cabin": "9.1.2",
		"compare-versions": "4.1.3",
		"cabin": "11.0.1",
		"compare-versions": "5.0.1",
		"csv-parse": "5.3.2",
		"csvtojson": "2.0.10",
		"cuid": "2.1.8",
		"dayjs": "1.11.4",
		"dockerode": "3.3.3",
		"dayjs": "1.11.6",
		"dockerode": "3.3.4",
		"dotenv-extended": "2.9.0",
		"fastify": "4.4.0",
		"fastify-plugin": "4.1.0",
		"execa": "6.1.0",
		"fastify": "4.10.2",
		"fastify-plugin": "4.3.0",
		"fastify-socket.io": "4.0.0",
		"generate-password": "1.7.0",
		"get-port": "6.1.2",
		"got": "12.3.1",
		"got": "12.5.3",
		"is-ip": "5.0.0",
		"is-port-reachable": "4.0.0",
		"js-yaml": "4.1.0",
		"jsonwebtoken": "8.5.1",
		"minimist": "^1.2.7",
		"node-forge": "1.3.1",
		"node-os-utils": "1.3.7",
		"p-queue": "7.3.0",
		"p-all": "4.0.0",
		"p-throttle": "5.0.0",
		"prisma": "4.6.1",
		"public-ip": "6.0.1",
		"pump": "3.0.0",
		"shell-quote": "^1.7.4",
		"socket.io": "4.5.3",
		"ssh-config": "4.1.6",
		"strip-ansi": "7.0.1",
		"unique-names-generator": "4.7.1"
	},
	"devDependencies": {
		"@types/node": "18.6.5",
		"@types/node": "18.11.9",
		"@types/node-os-utils": "1.3.0",
		"@typescript-eslint/eslint-plugin": "5.33.0",
		"@typescript-eslint/parser": "5.33.0",
		"esbuild": "0.15.0",
		"eslint": "8.21.0",
		"@typescript-eslint/eslint-plugin": "5.44.0",
		"@typescript-eslint/parser": "5.44.0",
		"esbuild": "0.15.15",
		"eslint": "8.28.0",
		"eslint-config-prettier": "8.5.0",
		"eslint-plugin-prettier": "4.2.1",
		"nodemon": "2.0.19",
		"nodemon": "2.0.20",
		"prettier": "2.7.1",
		"prisma": "3.15.2",
		"rimraf": "3.0.2",
		"tsconfig-paths": "4.1.0",
		"typescript": "4.7.4"
		"types-fastify-socket.io": "0.0.1",
		"typescript": "4.9.3"
	},
	"prisma": {
		"seed": "node prisma/seed.js"

@@ -0,0 +1,30 @@
-- CreateTable
CREATE TABLE "GlitchTip" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "secretKeyBase" TEXT,
    "defaultEmail" TEXT NOT NULL,
    "defaultUsername" TEXT NOT NULL,
    "defaultPassword" TEXT NOT NULL,
    "defaultEmailFrom" TEXT NOT NULL DEFAULT 'glitchtip@domain.tdl',
    "emailSmtpHost" TEXT DEFAULT 'domain.tdl',
    "emailSmtpPort" INTEGER DEFAULT 25,
    "emailSmtpUser" TEXT,
    "emailSmtpPassword" TEXT,
    "emailSmtpUseTls" BOOLEAN DEFAULT false,
    "emailSmtpUseSsl" BOOLEAN DEFAULT false,
    "emailBackend" TEXT,
    "mailgunApiKey" TEXT,
    "sendgridApiKey" TEXT,
    "enableOpenUserRegistration" BOOLEAN NOT NULL DEFAULT true,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "GlitchTip_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "GlitchTip_serviceId_key" ON "GlitchTip"("serviceId");

@@ -0,0 +1,22 @@
-- CreateTable
CREATE TABLE "Appwrite" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "serviceId" TEXT NOT NULL,
    "opensslKeyV1" TEXT NOT NULL,
    "executorSecret" TEXT NOT NULL,
    "redisPassword" TEXT NOT NULL,
    "mariadbHost" TEXT,
    "mariadbPort" INTEGER NOT NULL DEFAULT 3306,
    "mariadbUser" TEXT NOT NULL,
    "mariadbPassword" TEXT NOT NULL,
    "mariadbRootUser" TEXT NOT NULL,
    "mariadbRootUserPassword" TEXT NOT NULL,
    "mariadbDatabase" TEXT NOT NULL,
    "mariadbPublicPort" INTEGER,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Appwrite_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Appwrite_serviceId_key" ON "Appwrite"("serviceId");

@@ -0,0 +1,20 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "autodeploy" BOOLEAN NOT NULL DEFAULT true,
    "isBot" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "DNSServers" TEXT;

@@ -0,0 +1,42 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "forPublic" BOOLEAN NOT NULL DEFAULT false,
    "type" TEXT,
    "apiUrl" TEXT,
    "htmlUrl" TEXT,
    "customPort" INTEGER NOT NULL DEFAULT 22,
    "organization" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "githubAppId" TEXT,
    "gitlabAppId" TEXT,
    CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "autodeploy" BOOLEAN NOT NULL DEFAULT true,
    "isBot" BOOLEAN NOT NULL DEFAULT false,
    "isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "Searxng" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "secretKey" TEXT NOT NULL,
    "redisPassword" TEXT NOT NULL,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Searxng_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Searxng_serviceId_key" ON "Searxng"("serviceId");

@@ -0,0 +1,29 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "proxyHash" TEXT,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "DNSServers" TEXT,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1
);
INSERT INTO "new_Setting" ("DNSServers", "arch", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "DNSServers", "arch", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Build" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "type" TEXT NOT NULL,
    "applicationId" TEXT,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "githubAppId" TEXT,
    "gitlabAppId" TEXT,
    "commit" TEXT,
    "pullmergeRequestId" TEXT,
    "forceRebuild" BOOLEAN NOT NULL DEFAULT false,
    "sourceBranch" TEXT,
    "branch" TEXT,
    "status" TEXT DEFAULT 'queued',
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Build" ("applicationId", "branch", "commit", "createdAt", "destinationDockerId", "gitSourceId", "githubAppId", "gitlabAppId", "id", "status", "type", "updatedAt") SELECT "applicationId", "branch", "commit", "createdAt", "destinationDockerId", "gitSourceId", "githubAppId", "gitlabAppId", "id", "status", "type", "updatedAt" FROM "Build";
DROP TABLE "Build";
ALTER TABLE "new_Build" RENAME TO "Build";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,18 @@
-- CreateTable
CREATE TABLE "Weblate" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "adminPassword" TEXT NOT NULL,
    "postgresqlHost" TEXT NOT NULL,
    "postgresqlPort" INTEGER NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Weblate_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Weblate_serviceId_key" ON "Weblate"("serviceId");

@@ -0,0 +1,23 @@
-- CreateTable
CREATE TABLE "Taiga" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "secretKey" TEXT NOT NULL,
    "erlangSecret" TEXT NOT NULL,
    "djangoAdminPassword" TEXT NOT NULL,
    "djangoAdminUser" TEXT NOT NULL,
    "rabbitMQUser" TEXT NOT NULL,
    "rabbitMQPassword" TEXT NOT NULL,
    "postgresqlHost" TEXT NOT NULL,
    "postgresqlPort" INTEGER NOT NULL,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Taiga_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "Taiga_serviceId_key" ON "Taiga"("serviceId");

@@ -0,0 +1,22 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "debug" BOOLEAN NOT NULL DEFAULT false,
    "previews" BOOLEAN NOT NULL DEFAULT false,
    "autodeploy" BOOLEAN NOT NULL DEFAULT true,
    "isBot" BOOLEAN NOT NULL DEFAULT false,
    "isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
    "isDBBranching" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,20 @@
/*
  Warnings:

  - You are about to alter the column `time` on the `BuildLog` table. The data in that column could be lost. The data in that column will be cast from `Int` to `BigInt`.

*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_BuildLog" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT,
    "buildId" TEXT NOT NULL,
    "line" TEXT NOT NULL,
    "time" BIGINT NOT NULL
);
INSERT INTO "new_BuildLog" ("applicationId", "buildId", "id", "line", "time") SELECT "applicationId", "buildId", "id", "line", "time" FROM "BuildLog";
DROP TABLE "BuildLog";
ALTER TABLE "new_BuildLog" RENAME TO "BuildLog";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

@@ -0,0 +1,20 @@
-- CreateTable
CREATE TABLE "ApplicationConnectedDatabase" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "databaseId" TEXT,
    "hostedDatabaseType" TEXT,
    "hostedDatabaseHost" TEXT,
    "hostedDatabasePort" INTEGER,
    "hostedDatabaseName" TEXT,
    "hostedDatabaseUser" TEXT,
    "hostedDatabasePassword" TEXT,
    "hostedDatabaseDBName" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationConnectedDatabase_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "ApplicationConnectedDatabase_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "ApplicationConnectedDatabase_applicationId_key" ON "ApplicationConnectedDatabase"("applicationId");

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "isAPIDebuggingEnabled" BOOLEAN DEFAULT false;

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "DatabaseSecret" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "value" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "databaseId" TEXT NOT NULL,
    CONSTRAINT "DatabaseSecret_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "DatabaseSecret_name_databaseId_key" ON "DatabaseSecret"("name", "databaseId");

@@ -0,0 +1,18 @@
-- AlterTable
ALTER TABLE "Build" ADD COLUMN "previewApplicationId" TEXT;

-- CreateTable
CREATE TABLE "PreviewApplication" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "pullmergeRequestId" TEXT NOT NULL,
    "sourceBranch" TEXT NOT NULL,
    "isRandomDomain" BOOLEAN NOT NULL DEFAULT false,
    "customDomain" TEXT,
    "applicationId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "PreviewApplication_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);

-- CreateIndex
CREATE UNIQUE INDEX "PreviewApplication_applicationId_key" ON "PreviewApplication"("applicationId");

@@ -0,0 +1,10 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Certificate" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"key" TEXT NOT NULL,
|
||||
"cert" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"teamId" TEXT,
|
||||
CONSTRAINT "Certificate_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
@@ -0,0 +1,23 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
|
||||
"isBot" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,26 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"forPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"customPort" INTEGER NOT NULL DEFAULT 22,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
|
||||
DROP TABLE "GitSource";
|
||||
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- DropIndex
|
||||
DROP INDEX "PreviewApplication_applicationId_key";
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Build" ADD COLUMN "sourceRepository" TEXT;
|
||||
@@ -0,0 +1,3 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "dockerComposeFile" TEXT;
|
||||
ALTER TABLE "Application" ADD COLUMN "dockerComposeFileLocation" TEXT;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Application" ADD COLUMN "dockerComposeConfiguration" TEXT;
|
||||
@@ -0,0 +1,13 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ServiceSetting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ServiceSetting_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ServiceSetting_serviceId_name_key" ON "ServiceSetting"("serviceId", "name");
|
||||
@@ -0,0 +1,19 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ServicePersistentStorage" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"path" TEXT NOT NULL,
|
||||
"volumeName" TEXT,
|
||||
"predefined" BOOLEAN NOT NULL DEFAULT false,
|
||||
"containerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ServicePersistentStorage_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ServicePersistentStorage" ("createdAt", "id", "path", "serviceId", "updatedAt") SELECT "createdAt", "id", "path", "serviceId", "updatedAt" FROM "ServicePersistentStorage";
|
||||
DROP TABLE "ServicePersistentStorage";
|
||||
ALTER TABLE "new_ServicePersistentStorage" RENAME TO "ServicePersistentStorage";
|
||||
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_path_key" ON "ServicePersistentStorage"("serviceId", "path");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,24 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- Added the required column `variableName` to the `ServiceSetting` table without a default value. This is not possible if the table is not empty.
|
||||
|
||||
*/
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ServiceSetting" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"variableName" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ServiceSetting_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ServiceSetting" ("createdAt", "id", "name", "serviceId", "updatedAt", "value") SELECT "createdAt", "id", "name", "serviceId", "updatedAt", "value" FROM "ServiceSetting";
|
||||
DROP TABLE "ServiceSetting";
|
||||
ALTER TABLE "new_ServiceSetting" RENAME TO "ServiceSetting";
|
||||
CREATE UNIQUE INDEX "ServiceSetting_serviceId_name_key" ON "ServiceSetting"("serviceId", "name");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,21 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_Service" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"fqdn" TEXT,
|
||||
"exposePort" INTEGER,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"type" TEXT,
|
||||
"version" TEXT,
|
||||
"templateVersion" TEXT NOT NULL DEFAULT '0.0.0',
|
||||
"destinationDockerId" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "dualCerts", "exposePort", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "dualCerts", "exposePort", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
|
||||
DROP TABLE "Service";
|
||||
ALTER TABLE "new_Service" RENAME TO "Service";
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,11 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- A unique constraint covering the columns `[serviceId,containerId,path]` on the table `ServicePersistentStorage` will be added. If there are existing duplicate values, this will fail.
|
||||
|
||||
*/
|
||||
-- DropIndex
|
||||
DROP INDEX "ServicePersistentStorage_serviceId_path_key";
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_containerId_path_key" ON "ServicePersistentStorage"("serviceId", "containerId", "path");
|
||||
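With storage now scoped per container, the unique key becomes the triple `(serviceId, containerId, path)`, so the same mount path can coexist across different containers of one service. Addressed from the generated Prisma client, the new compound key looks like this (ids and values are illustrative, not from the commit):

```ts
// Upsert a volume definition keyed by the new compound unique.
// Prisma names the composite where-input after the fields it spans.
await prisma.servicePersistentStorage.upsert({
  where: {
    serviceId_containerId_path: {
      serviceId: 'svc_123',          // assumed id
      containerId: 'postgresql',     // per-container scope added by this migration
      path: '/var/lib/postgresql/data'
    }
  },
  update: { volumeName: 'svc_123-pg-data' },
  create: {
    serviceId: 'svc_123',
    containerId: 'postgresql',
    path: '/var/lib/postgresql/data',
    volumeName: 'svc_123-pg-data',
    predefined: true
  }
});
```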
@@ -0,0 +1,32 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Wordpress" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "extraConfig" TEXT,
    "tablePrefix" TEXT,
    "ownMysql" BOOLEAN NOT NULL DEFAULT false,
    "mysqlHost" TEXT,
    "mysqlPort" INTEGER,
    "mysqlUser" TEXT,
    "mysqlPassword" TEXT,
    "mysqlRootUser" TEXT,
    "mysqlRootUserPassword" TEXT,
    "mysqlDatabase" TEXT,
    "mysqlPublicPort" INTEGER,
    "ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
    "ftpUser" TEXT,
    "ftpPassword" TEXT,
    "ftpPublicPort" INTEGER,
    "ftpHostKey" TEXT,
    "ftpHostKeyPrivate" TEXT,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlHost", "mysqlPassword", "mysqlPort", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "ownMysql", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlHost", "mysqlPassword", "mysqlPort", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "ownMysql", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
DROP TABLE "Wordpress";
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "proxyDefaultRedirect" TEXT;
@@ -0,0 +1,45 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "proxyPassword" TEXT NOT NULL,
    "proxyUser" TEXT NOT NULL,
    "proxyHash" TEXT,
    "proxyDefaultRedirect" TEXT,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "DNSServers" TEXT,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false
);
INSERT INTO "new_Setting" ("DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_ApplicationPersistentStorage" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "applicationId" TEXT NOT NULL,
    "path" TEXT NOT NULL,
    "oldPath" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationPersistentStorage" ("applicationId", "createdAt", "id", "path", "updatedAt") SELECT "applicationId", "createdAt", "id", "path", "updatedAt" FROM "ApplicationPersistentStorage";
DROP TABLE "ApplicationPersistentStorage";
ALTER TABLE "new_ApplicationPersistentStorage" RENAME TO "ApplicationPersistentStorage";
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,37 @@
/*
  Warnings:

  - You are about to drop the column `proxyHash` on the `Setting` table. All the data in the column will be lost.
  - You are about to drop the column `proxyPassword` on the `Setting` table. All the data in the column will be lost.
  - You are about to drop the column `proxyUser` on the `Setting` table. All the data in the column will be lost.

*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,59 @@
-- CreateTable
CREATE TABLE "DockerRegistry" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "url" TEXT NOT NULL,
    "username" TEXT,
    "password" TEXT,
    "isSystemWide" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "teamId" TEXT,
    CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Application" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "repository" TEXT,
    "configHash" TEXT,
    "branch" TEXT,
    "buildPack" TEXT,
    "projectId" INTEGER,
    "port" INTEGER,
    "exposePort" INTEGER,
    "installCommand" TEXT,
    "buildCommand" TEXT,
    "startCommand" TEXT,
    "baseDirectory" TEXT,
    "publishDirectory" TEXT,
    "deploymentType" TEXT,
    "phpModules" TEXT,
    "pythonWSGI" TEXT,
    "pythonModule" TEXT,
    "pythonVariable" TEXT,
    "dockerFileLocation" TEXT,
    "denoMainFile" TEXT,
    "denoOptions" TEXT,
    "dockerComposeFile" TEXT,
    "dockerComposeFileLocation" TEXT,
    "dockerComposeConfiguration" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "baseImage" TEXT,
    "baseBuildImage" TEXT,
    "dockerRegistryId" TEXT NOT NULL DEFAULT '0',
    CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,30 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,60 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT,
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", coalesce("isAPIDebuggingEnabled", false) AS "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_GlitchTip" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "postgresqlUser" TEXT NOT NULL,
    "postgresqlPassword" TEXT NOT NULL,
    "postgresqlDatabase" TEXT NOT NULL,
    "postgresqlPublicPort" INTEGER,
    "secretKeyBase" TEXT,
    "defaultEmail" TEXT NOT NULL,
    "defaultUsername" TEXT NOT NULL,
    "defaultPassword" TEXT NOT NULL,
    "defaultEmailFrom" TEXT NOT NULL DEFAULT 'glitchtip@domain.tdl',
    "emailSmtpHost" TEXT DEFAULT 'domain.tdl',
    "emailSmtpPort" INTEGER DEFAULT 25,
    "emailSmtpUser" TEXT,
    "emailSmtpPassword" TEXT,
    "emailSmtpUseTls" BOOLEAN NOT NULL DEFAULT false,
    "emailSmtpUseSsl" BOOLEAN NOT NULL DEFAULT false,
    "emailBackend" TEXT,
    "mailgunApiKey" TEXT,
    "sendgridApiKey" TEXT,
    "enableOpenUserRegistration" BOOLEAN NOT NULL DEFAULT true,
    "serviceId" TEXT NOT NULL,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    CONSTRAINT "GlitchTip_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_GlitchTip" ("createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUseSsl", "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt") SELECT "createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", coalesce("emailSmtpUseSsl", false) AS "emailSmtpUseSsl", coalesce("emailSmtpUseTls", false) AS "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt" FROM "GlitchTip";
DROP TABLE "GlitchTip";
ALTER TABLE "new_GlitchTip" RENAME TO "GlitchTip";
CREATE UNIQUE INDEX "GlitchTip_serviceId_key" ON "GlitchTip"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
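This migration tightens `isAPIDebuggingEnabled` from nullable to `NOT NULL`, and the row copy handles legacy NULLs with `coalesce("isAPIDebuggingEnabled", false)`; the same trick backfills the two GlitchTip SMTP flags below it. The application-side equivalent of that coalesce, for code reading rows written before the constraint existed, is the nullish-coalescing operator (a sketch; the row shape is assumed):

```ts
type LegacySettingRow = { isAPIDebuggingEnabled: boolean | null };

// Mirrors coalesce("isAPIDebuggingEnabled", false) in the migration:
// NULLs written while the column was still nullable collapse to the default.
function apiDebuggingEnabled(row: LegacySettingRow): boolean {
  return row.isAPIDebuggingEnabled ?? false;
}
```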
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "sentryDSN" TEXT;
@@ -0,0 +1,31 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "sentryDSN" TEXT,
    "isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT coalesce("DNSServers", '1.1.1.1,8.8.8.8') AS "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,33 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "fqdn" TEXT,
    "dualCerts" BOOLEAN NOT NULL DEFAULT false,
    "minPort" INTEGER NOT NULL DEFAULT 9000,
    "maxPort" INTEGER NOT NULL DEFAULT 9100,
    "DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
    "ipv4" TEXT,
    "ipv6" TEXT,
    "arch" TEXT,
    "concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
    "applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
    "numberOfDockerImagesKeptLocally" INTEGER NOT NULL DEFAULT 3,
    "proxyDefaultRedirect" TEXT,
    "doNotTrack" BOOLEAN NOT NULL DEFAULT false,
    "sentryDSN" TEXT,
    "previewSeparator" TEXT NOT NULL DEFAULT '.',
    "isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
    "isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
    "isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "gitCommitHash" TEXT;
@@ -0,0 +1,66 @@
/*
  Warnings:

  - You are about to drop the column `isSystemWide` on the `DockerRegistry` table. All the data in the column will be lost.

*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_DockerRegistry" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "url" TEXT NOT NULL,
    "username" TEXT,
    "password" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "teamId" TEXT,
    CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_DockerRegistry" ("createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username") SELECT "createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username" FROM "DockerRegistry";
DROP TABLE "DockerRegistry";
ALTER TABLE "new_DockerRegistry" RENAME TO "DockerRegistry";
CREATE TABLE "new_Application" (
    "id" TEXT NOT NULL PRIMARY KEY,
    "name" TEXT NOT NULL,
    "fqdn" TEXT,
    "repository" TEXT,
    "configHash" TEXT,
    "branch" TEXT,
    "buildPack" TEXT,
    "projectId" INTEGER,
    "port" INTEGER,
    "exposePort" INTEGER,
    "installCommand" TEXT,
    "buildCommand" TEXT,
    "startCommand" TEXT,
    "baseDirectory" TEXT,
    "publishDirectory" TEXT,
    "deploymentType" TEXT,
    "phpModules" TEXT,
    "pythonWSGI" TEXT,
    "pythonModule" TEXT,
    "pythonVariable" TEXT,
    "dockerFileLocation" TEXT,
    "denoMainFile" TEXT,
    "denoOptions" TEXT,
    "dockerComposeFile" TEXT,
    "dockerComposeFileLocation" TEXT,
    "dockerComposeConfiguration" TEXT,
    "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" DATETIME NOT NULL,
    "destinationDockerId" TEXT,
    "gitSourceId" TEXT,
    "gitCommitHash" TEXT,
    "baseImage" TEXT,
    "baseBuildImage" TEXT,
    "dockerRegistryId" TEXT,
    CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
    CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
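Compared with the earlier rebuild that introduced `"dockerRegistryId" TEXT NOT NULL DEFAULT '0'` with `ON DELETE RESTRICT`, this one makes the column nullable and switches the foreign key to `ON DELETE SET NULL`: deleting a registry now detaches its applications instead of being blocked. With the relation optional, attaching and detaching from the Prisma client would look like this (ids are placeholders):

```ts
// Attach a registry to an application (both ids assumed to exist).
await prisma.application.update({
  where: { id: 'app_cuid' },
  data: { dockerRegistry: { connect: { id: 'registry_cuid' } } }
});

// Detach; the column simply returns to NULL, matching ON DELETE SET NULL.
await prisma.application.update({
  where: { id: 'app_cuid' },
  data: { dockerRegistry: { disconnect: true } }
});
```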
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "simpleDockerfile" TEXT;
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerRegistryImageName" TEXT;
@@ -1,6 +1,6 @@
generator client {
  provider      = "prisma-client-js"
  binaryTargets = ["native", "linux-musl"]
  binaryTargets = ["native"]
}

datasource db {
@@ -8,24 +8,40 @@ datasource db {
  url = env("COOLIFY_DATABASE_URL")
}

model Certificate {
  id        String   @id @default(cuid())
  key       String
  cert      String
  team      Team?    @relation(fields: [teamId], references: [id])
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  teamId    String?
}

model Setting {
  id                    String   @id @default(cuid())
  fqdn                  String?  @unique
  isRegistrationEnabled Boolean  @default(false)
  dualCerts             Boolean  @default(false)
  minPort               Int      @default(9000)
  maxPort               Int      @default(9100)
  proxyPassword         String
  proxyUser             String
  proxyHash             String?
  isAutoUpdateEnabled   Boolean  @default(false)
  isDNSCheckEnabled     Boolean  @default(true)
  isTraefikUsed         Boolean  @default(true)
  createdAt             DateTime @default(now())
  updatedAt             DateTime @updatedAt
  ipv4                  String?
  ipv6                  String?
  arch                  String?
  id                                      String   @id @default(cuid())
  fqdn                                    String?  @unique
  dualCerts                               Boolean  @default(false)
  minPort                                 Int      @default(9000)
  maxPort                                 Int      @default(9100)
  DNSServers                              String   @default("1.1.1.1,8.8.8.8")
  ipv4                                    String?
  ipv6                                    String?
  arch                                    String?
  concurrentBuilds                        Int      @default(1)
  applicationStoragePathMigrationFinished Boolean  @default(false)
  numberOfDockerImagesKeptLocally         Int      @default(3)
  proxyDefaultRedirect                    String?
  doNotTrack                              Boolean  @default(false)
  sentryDSN                               String?
  previewSeparator                        String   @default(".")
  isAPIDebuggingEnabled                   Boolean  @default(false)
  isRegistrationEnabled                   Boolean  @default(true)
  isAutoUpdateEnabled                     Boolean  @default(false)
  isDNSCheckEnabled                       Boolean  @default(true)
  isTraefikUsed                           Boolean  @default(true)
  createdAt                               DateTime @default(now())
  updatedAt                               DateTime @updatedAt
}

model User {
@@ -67,6 +83,8 @@ model Team {
  gitLabApps     GitlabApp[]
  service        Service[]
  users          User[]
  certificate    Certificate[]
  dockerRegistry DockerRegistry[]
}

model TeamInvitation {
@@ -80,59 +98,104 @@ model TeamInvitation {
}

model Application {
  id                  String   @id @default(cuid())
  name                String
  fqdn                String?
  repository          String?
  configHash          String?
  branch              String?
  buildPack           String?
  projectId           Int?
  port                Int?
  exposePort          Int?
  installCommand      String?
  buildCommand        String?
  startCommand        String?
  baseDirectory       String?
  publishDirectory    String?
  deploymentType      String?
  phpModules          String?
  pythonWSGI          String?
  pythonModule        String?
  pythonVariable      String?
  dockerFileLocation  String?
  denoMainFile        String?
  denoOptions         String?
  createdAt           DateTime @default(now())
  updatedAt           DateTime @updatedAt
  destinationDockerId String?
  gitSourceId         String?
  baseImage           String?
  baseBuildImage      String?
  gitSource           GitSource? @relation(fields: [gitSourceId], references: [id])
  destinationDocker   DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  persistentStorage   ApplicationPersistentStorage[]
  settings            ApplicationSettings?
  secrets             Secret[]
  teams               Team[]
  id                         String   @id @default(cuid())
  name                       String
  fqdn                       String?
  repository                 String?
  configHash                 String?
  branch                     String?
  buildPack                  String?
  projectId                  Int?
  port                       Int?
  exposePort                 Int?
  installCommand             String?
  buildCommand               String?
  startCommand               String?
  baseDirectory              String?
  publishDirectory           String?
  deploymentType             String?
  phpModules                 String?
  pythonWSGI                 String?
  pythonModule               String?
  pythonVariable             String?
  dockerFileLocation         String?
  denoMainFile               String?
  denoOptions                String?
  dockerComposeFile          String?
  dockerComposeFileLocation  String?
  dockerComposeConfiguration String?
  createdAt                  DateTime @default(now())
  updatedAt                  DateTime @updatedAt
  destinationDockerId        String?
  gitSourceId                String?
  gitCommitHash              String?
  baseImage                  String?
  baseBuildImage             String?
  settings                   ApplicationSettings?
  dockerRegistryId           String?
  dockerRegistryImageName    String?
  simpleDockerfile           String?

  persistentStorage  ApplicationPersistentStorage[]
  secrets            Secret[]
  teams              Team[]
  connectedDatabase  ApplicationConnectedDatabase?
  previewApplication PreviewApplication[]
  gitSource          GitSource? @relation(fields: [gitSourceId], references: [id])
  destinationDocker  DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  dockerRegistry     DockerRegistry? @relation(fields: [dockerRegistryId], references: [id])
}

model PreviewApplication {
  id                 String      @id @default(cuid())
  pullmergeRequestId String
  sourceBranch       String
  isRandomDomain     Boolean     @default(false)
  customDomain       String?
  applicationId      String
  application        Application @relation(fields: [applicationId], references: [id])
  createdAt          DateTime    @default(now())
  updatedAt          DateTime    @updatedAt
}

model ApplicationConnectedDatabase {
  id                     String      @id @default(cuid())
  applicationId          String      @unique
  databaseId             String?
  hostedDatabaseType     String?
  hostedDatabaseHost     String?
  hostedDatabasePort     Int?
  hostedDatabaseName     String?
  hostedDatabaseUser     String?
  hostedDatabasePassword String?
  hostedDatabaseDBName   String?
  createdAt              DateTime    @default(now())
  updatedAt              DateTime    @updatedAt
  database               Database?   @relation(fields: [databaseId], references: [id])
  application            Application @relation(fields: [applicationId], references: [id])
}

model ApplicationSettings {
  id            String      @id @default(cuid())
  applicationId String      @unique
  dualCerts     Boolean     @default(false)
  debug         Boolean     @default(false)
  previews      Boolean     @default(false)
  autodeploy    Boolean     @default(true)
  createdAt     DateTime    @default(now())
  updatedAt     DateTime    @updatedAt
  application   Application @relation(fields: [applicationId], references: [id])
  id                 String      @id @default(cuid())
  applicationId      String      @unique
  dualCerts          Boolean     @default(false)
  debug              Boolean     @default(false)
  previews           Boolean     @default(false)
  autodeploy         Boolean     @default(true)
  isBot              Boolean     @default(false)
  isPublicRepository Boolean     @default(false)
  isDBBranching      Boolean     @default(false)
  isCustomSSL        Boolean     @default(false)
  createdAt          DateTime    @default(now())
  updatedAt          DateTime    @updatedAt
  application        Application @relation(fields: [applicationId], references: [id])
}

model ApplicationPersistentStorage {
  id            String      @id @default(cuid())
  applicationId String
  path          String
  oldPath       Boolean     @default(false)
  createdAt     DateTime    @default(now())
  updatedAt     DateTime    @updatedAt
  application   Application @relation(fields: [applicationId], references: [id])
@@ -141,14 +204,17 @@ model ApplicationPersistentStorage {
}

model ServicePersistentStorage {
  id        String   @id @default(cuid())
  serviceId String
  path      String
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt
  service   Service  @relation(fields: [serviceId], references: [id])
  id          String   @id @default(cuid())
  serviceId   String
  path        String
  volumeName  String?
  predefined  Boolean  @default(false)
  containerId String?
  createdAt   DateTime @default(now())
  updatedAt   DateTime @updatedAt
  service     Service  @relation(fields: [serviceId], references: [id])

  @@unique([serviceId, path])
  @@unique([serviceId, containerId, path])
}

model Secret {
@@ -182,22 +248,27 @@ model BuildLog {
  applicationId String?
  buildId       String
  line          String
  time          Int
  time          BigInt
}

model Build {
  id                  String   @id @default(cuid())
  type                String
  applicationId       String?
  destinationDockerId String?
  gitSourceId         String?
  githubAppId         String?
  gitlabAppId         String?
  commit              String?
  branch              String?
  status              String?  @default("queued")
  createdAt           DateTime @default(now())
  updatedAt           DateTime @updatedAt
  id                   String   @id @default(cuid())
  type                 String
  applicationId        String?
  destinationDockerId  String?
  gitSourceId          String?
  githubAppId          String?
  gitlabAppId          String?
  commit               String?
  pullmergeRequestId   String?
  previewApplicationId String?
  forceRebuild         Boolean  @default(false)
  sourceBranch         String?
  sourceRepository     String?
  branch               String?
  status               String?  @default("queued")
  createdAt            DateTime @default(now())
  updatedAt            DateTime @updatedAt
}

model DestinationDocker {
@@ -233,9 +304,23 @@ model SshKey {
  destinationDocker DestinationDocker[]
}

model DockerRegistry {
  id          String        @id @default(cuid())
  name        String
  url         String
  username    String?
  password    String?
  createdAt   DateTime      @default(now())
  updatedAt   DateTime      @updatedAt
  teamId      String?
  team        Team?         @relation(fields: [teamId], references: [id])
  application Application[]
}

model GitSource {
  id        String  @id @default(cuid())
  name      String
  forPublic Boolean @default(false)
  type      String?
  apiUrl    String?
  htmlUrl   String?
@@ -245,6 +330,7 @@ model GitSource {
  updatedAt    DateTime      @updatedAt
  githubAppId  String?       @unique
  gitlabAppId  String?       @unique
  isSystemWide Boolean       @default(false)
  gitlabApp    GitlabApp?    @relation(fields: [gitlabAppId], references: [id])
  githubApp    GithubApp?    @relation(fields: [githubAppId], references: [id])
  application  Application[]
@@ -283,22 +369,36 @@ model GitlabApp {
}

model Database {
  id                  String   @id @default(cuid())
  name                String
  publicPort          Int?
  defaultDatabase     String?
  type                String?
  version             String?
  dbUser              String?
  dbUserPassword      String?
  rootUser            String?
  rootUserPassword    String?
  destinationDockerId String?
  createdAt           DateTime @default(now())
  updatedAt           DateTime @updatedAt
  destinationDocker   DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  settings            DatabaseSettings?
  teams               Team[]
  id                           String   @id @default(cuid())
  name                         String
  publicPort                   Int?
  defaultDatabase              String?
  type                         String?
  version                      String?
  dbUser                       String?
  dbUserPassword               String?
  rootUser                     String?
  rootUserPassword             String?
  destinationDockerId          String?
  createdAt                    DateTime @default(now())
  updatedAt                    DateTime @updatedAt
  destinationDocker            DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  settings                     DatabaseSettings?
  teams                        Team[]
  applicationConnectedDatabase ApplicationConnectedDatabase[]
  databaseSecret               DatabaseSecret[]
}

model DatabaseSecret {
  id         String   @id @default(cuid())
  name       String
  value      String
  createdAt  DateTime @default(now())
  updatedAt  DateTime @updatedAt
  databaseId String
  database   Database @relation(fields: [databaseId], references: [id])

  @@unique([name, databaseId])
}

model DatabaseSettings {
@@ -319,23 +419,44 @@ model Service {
  dualCerts           Boolean  @default(false)
  type                String?
  version             String?
  templateVersion     String   @default("0.0.0")
  destinationDockerId String?
  createdAt           DateTime @default(now())
  updatedAt           DateTime @updatedAt
  destinationDocker   DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
  fider               Fider?
  ghost               Ghost?
  hasura              Hasura?
  meiliSearch         MeiliSearch?
  minio               Minio?
  moodle              Moodle?
  plausibleAnalytics  PlausibleAnalytics?
  persistentStorage   ServicePersistentStorage[]
  serviceSecret       ServiceSecret[]
  umami               Umami?
  vscodeserver        Vscodeserver?
  wordpress           Wordpress?
  serviceSetting      ServiceSetting[]
  teams               Team[]

  fider              Fider?
  ghost              Ghost?
  glitchTip          GlitchTip?
  hasura             Hasura?
  meiliSearch        MeiliSearch?
  minio              Minio?
  moodle             Moodle?
  plausibleAnalytics PlausibleAnalytics?
  umami              Umami?
  vscodeserver       Vscodeserver?
  wordpress          Wordpress?
  appwrite           Appwrite?
  searxng            Searxng?
  weblate            Weblate?
  taiga              Taiga?
}

model ServiceSetting {
  id           String   @id @default(cuid())
  serviceId    String
  name         String
  value        String
  variableName String
  createdAt    DateTime @default(now())
  updatedAt    DateTime @updatedAt
  service      Service  @relation(fields: [serviceId], references: [id])

  @@unique([serviceId, name])
}

model PlausibleAnalytics {
@@ -383,10 +504,10 @@ model Wordpress {
  ownMysql              Boolean @default(false)
  mysqlHost             String?
  mysqlPort             Int?
  mysqlUser             String
  mysqlPassword         String
  mysqlRootUser         String
  mysqlRootUserPassword String
  mysqlUser             String?
  mysqlPassword         String?
  mysqlRootUser         String?
  mysqlRootUserPassword String?
  mysqlDatabase         String?
  mysqlPublicPort       Int?
  ftpEnabled            Boolean @default(false)
@@ -491,3 +612,94 @@ model Moodle {
  updatedAt DateTime @updatedAt
  service   Service  @relation(fields: [serviceId], references: [id])
}

model Appwrite {
  id                      String   @id @default(cuid())
  serviceId               String   @unique
  opensslKeyV1            String
  executorSecret          String
  redisPassword           String
  mariadbHost             String?
  mariadbPort             Int      @default(3306)
  mariadbUser             String
  mariadbPassword         String
  mariadbRootUser         String
  mariadbRootUserPassword String
  mariadbDatabase         String
  mariadbPublicPort       Int?
  createdAt               DateTime @default(now())
  updatedAt               DateTime @updatedAt
  service                 Service  @relation(fields: [serviceId], references: [id])
}

model GlitchTip {
  id                         String   @id @default(cuid())
  postgresqlUser             String
  postgresqlPassword         String
  postgresqlDatabase         String
  postgresqlPublicPort       Int?
  secretKeyBase              String?
  defaultEmail               String
  defaultUsername            String
  defaultPassword            String
  defaultEmailFrom           String   @default("glitchtip@domain.tdl")
  emailSmtpHost              String?  @default("domain.tdl")
  emailSmtpPort              Int?     @default(25)
  emailSmtpUser              String?
  emailSmtpPassword          String?
  emailSmtpUseTls            Boolean  @default(false)
  emailSmtpUseSsl            Boolean  @default(false)
  emailBackend               String?
  mailgunApiKey              String?
  sendgridApiKey             String?
  enableOpenUserRegistration Boolean  @default(true)
  serviceId                  String   @unique
  createdAt                  DateTime @default(now())
  updatedAt                  DateTime @updatedAt
  service                    Service  @relation(fields: [serviceId], references: [id])
}

model Searxng {
  id            String   @id @default(cuid())
  secretKey     String
  redisPassword String
  serviceId     String   @unique
  createdAt     DateTime @default(now())
  updatedAt     DateTime @updatedAt
  service       Service  @relation(fields: [serviceId], references: [id])
}

model Weblate {
  id                   String   @id @default(cuid())
  adminPassword        String
  postgresqlHost       String
  postgresqlPort       Int
  postgresqlUser       String
  postgresqlPassword   String
  postgresqlDatabase   String
  postgresqlPublicPort Int?
  serviceId            String   @unique
  createdAt            DateTime @default(now())
  updatedAt            DateTime @updatedAt
  service              Service  @relation(fields: [serviceId], references: [id])
}

model Taiga {
  id                   String   @id @default(cuid())
  secretKey            String
  erlangSecret         String
  djangoAdminPassword  String
  djangoAdminUser      String
  rabbitMQUser         String
  rabbitMQPassword     String
  postgresqlHost       String
  postgresqlPort       Int
  postgresqlUser       String
  postgresqlPassword   String
  postgresqlDatabase   String
  postgresqlPublicPort Int?
  serviceId            String   @unique
  createdAt            DateTime @default(now())
  updatedAt            DateTime @updatedAt
  service              Service  @relation(fields: [serviceId], references: [id])
}
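On the schema side, every bundled service (GlitchTip, Appwrite, Searxng, Weblate, Taiga and friends) is an optional one-to-one hanging off `Service` via a unique `serviceId`, while free-form knobs live in `ServiceSetting` rows keyed by `[serviceId, name]`. One consequence worth spelling out: a service and its settings can be created in a single nested write. A sketch with illustrative values, not a template the commit ships:

```ts
const service = await prisma.service.create({
  data: {
    name: 'search',
    type: 'searxng',
    templateVersion: '1.0.0',
    serviceSetting: {
      create: [
        // variableName mirrors the $$config_* placeholders used by templates
        { name: 'SEARXNG_BASE_URL', value: 'https://search.example.com', variableName: '$$config_searxng_base_url' }
      ]
    }
  },
  include: { serviceSetting: true }
});
```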
@@ -1,31 +1,18 @@
const dotEnvExtended = require('dotenv-extended');
dotEnvExtended.load();
const crypto = require('crypto');
const generator = require('generate-password');
const cuid = require('cuid');
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();

function generatePassword(length = 24) {
  return generator.generate({
    length,
    numbers: true,
    strict: true
  });
}
const algorithm = 'aes-256-ctr';

async function main() {
  // Enable registration for the first user
  // Set initial HAProxy password
  const settingsFound = await prisma.setting.findFirst({});
  if (!settingsFound) {
    await prisma.setting.create({
      data: {
        isRegistrationEnabled: true,
        proxyPassword: encrypt(generatePassword()),
        proxyUser: cuid(),
        arch: process.arch
        id: '0',
        arch: process.arch,
      }
    });
  } else {
@@ -34,11 +21,11 @@ async function main() {
        id: settingsFound.id
      },
      data: {
        isTraefikUsed: true,
        proxyHash: null
        id: '0'
      }
    });
  }
  // Create local docker engine
  const localDocker = await prisma.destinationDocker.findFirst({
    where: { engine: '/var/run/docker.sock' }
  });
@@ -55,17 +42,54 @@ async function main() {

  // Set auto-update based on env variable
  const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
  const settings = await prisma.setting.findFirst({});
  if (settings) {
    await prisma.setting.update({
      where: {
        id: settings.id
      },
  await prisma.setting.update({
    where: {
      id: '0'
    },
    data: {
      isAutoUpdateEnabled
    }
  });
  // Create public github source
  const github = await prisma.gitSource.findFirst({
    where: { htmlUrl: 'https://github.com', forPublic: true }
  });
  if (!github) {
    await prisma.gitSource.create({
      data: {
        isAutoUpdateEnabled
        apiUrl: 'https://api.github.com',
        htmlUrl: 'https://github.com',
        forPublic: true,
        name: 'Github Public',
        type: 'github'
      }
    });
  }
  // Create public gitlab source
  const gitlab = await prisma.gitSource.findFirst({
    where: { htmlUrl: 'https://gitlab.com', forPublic: true }
  });
  if (!gitlab) {
    await prisma.gitSource.create({
      data: {
        apiUrl: 'https://gitlab.com/api/v4',
        htmlUrl: 'https://gitlab.com',
        forPublic: true,
        name: 'Gitlab Public',
        type: 'gitlab'
      }
    });
  }
  // Set new preview secrets
  const secrets = await prisma.secret.findMany({ where: { isPRMRSecret: false } })
  if (secrets.length > 0) {
    for (const secret of secrets) {
      const previewSecrets = await prisma.secret.findMany({ where: { applicationId: secret.applicationId, name: secret.name, isPRMRSecret: true } })
      if (previewSecrets.length === 0) {
        await prisma.secret.create({ data: { ...secret, id: undefined, isPRMRSecret: true } })
      }
    }
  }
}
main()
  .catch((e) => {
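The reworked seed pins the singleton `Setting` row to `id: '0'` (creating it with that id, or renaming the existing row to it), so later code can address it without a lookup. The same invariant could be expressed more compactly as an upsert; a sketch, not the code the commit ships:

```ts
const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
// One statement: create the singleton if missing, otherwise update it in place.
await prisma.setting.upsert({
  where: { id: '0' },
  create: { id: '0', arch: process.arch, isAutoUpdateEnabled },
  update: { isAutoUpdateEnabled }
});
```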
apps/api/scripts/generateTags.mjs (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
import fs from 'fs/promises';
import yaml from 'js-yaml';
import got from 'got';

const repositories = [];
const templates = await fs.readFile('./apps/api/devTemplates.yaml', 'utf8');
const devTemplates = yaml.load(templates);
for (const template of devTemplates) {
	let image = template.services['$$id'].image.replaceAll(':$$core_version', '');
	if (!image.includes('/')) {
		image = `library/${image}`;
	}
	repositories.push({ image, name: template.type });
}
const services = []
const numberOfTags = 30;
// const semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/g)
for (const repository of repositories) {
	console.log('Querying', repository.name, 'at', repository.image);
	let semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)$/g)
	if (repository.name.startsWith('wordpress')) {
		semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)-php(0|[1-9]\d*)$/g)
	}
	if (repository.name.startsWith('minio')) {
		semverRegex = new RegExp(/^RELEASE.*$/g)
	}
	if (repository.name.startsWith('fider')) {
		semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)-([0-9]+)$/g)
	}
	if (repository.name.startsWith('searxng')) {
		semverRegex = new RegExp(/^\d{4}[\.\-](0?[1-9]|[12][0-9]|3[01])[\.\-](0?[1-9]|1[012]).*$/)
	}
	if (repository.name.startsWith('umami')) {
		semverRegex = new RegExp(/^postgresql-v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)-([0-9]+)$/g)
	}
	if (repository.image.includes('ghcr.io')) {
		const { execaCommand } = await import('execa');
		const { stdout } = await execaCommand(`docker run --rm quay.io/skopeo/stable list-tags docker://${repository.image}`);
		if (stdout) {
			const json = JSON.parse(stdout);
			const semverTags = json.Tags.filter((tag) => semverRegex.test(tag))
			let tags = semverTags.length > 10 ? semverTags.sort().reverse().slice(0, numberOfTags) : json.Tags.sort().reverse().slice(0, numberOfTags)
			if (!tags.includes('latest')) {
				tags.push('latest')
			}
			services.push({ name: repository.name, image: repository.image, tags })
		}
	} else {
		const { token } = await got.get(`https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repository.image}:pull`).json()
		let data = await got.get(`https://registry-1.docker.io/v2/${repository.image}/tags/list`, {
			headers: {
				Authorization: `Bearer ${token}`
			}
		}).json()
		const semverTags = data.tags.filter((tag) => semverRegex.test(tag))
		let tags = semverTags.length > 10 ? semverTags.sort().reverse().slice(0, numberOfTags) : data.tags.sort().reverse().slice(0, numberOfTags)
		if (!tags.includes('latest')) {
			tags.push('latest')
		}
		services.push({
			name: repository.name,
			image: repository.image,
			tags
		})
	}
}
await fs.writeFile('./apps/api/devTags.json', JSON.stringify(services));
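One pitfall worth noting for anyone reusing this script: the semver regexes are built with the `g` flag and then used with `.test()` inside `.filter()`. A `g`-flagged RegExp is stateful (its `lastIndex` persists between calls), so consecutive `.test()` calls can silently skip matching tags. A minimal illustration and the stateless alternative:

const re = /^v?\d+\.\d+\.\d+$/g;
console.log(re.test('1.2.3')); // true, lastIndex is now 5
console.log(re.test('1.2.4')); // false — the match starts at the stale lastIndex
// Stateless: drop the g flag (it is unnecessary with .test()).
const safe = /^v?\d+\.\d+\.\d+$/;
console.log(['1.2.3', '1.2.4'].filter((t) => safe.test(t))); // [ '1.2.3', '1.2.4' ]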
@@ -3,18 +3,29 @@ import cors from '@fastify/cors';
import serve from '@fastify/static';
import env from '@fastify/env';
import cookie from '@fastify/cookie';
import multipart from '@fastify/multipart';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import socketIO from 'fastify-socket.io'
import socketIOServer from './realtime'
import { cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, executeCommand, generateDatabaseConfiguration, isDev, listSettings, prisma, sentryDSN, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common';
import { scheduler } from './lib/scheduler';
import { compareVersions } from 'compare-versions';
import Graceful from '@ladjs/graceful'
import yaml from 'js-yaml'
import fs from 'fs/promises';
import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers';
import { checkContainer } from './lib/docker';
import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib';
import { refreshTags, refreshTemplates } from './routes/api/v1/handlers';
import * as Sentry from '@sentry/node';
declare module 'fastify' {
	interface FastifyInstance {
		config: {
			COOLIFY_APP_ID: string,
			COOLIFY_SECRET_KEY: string,
			COOLIFY_DATABASE_URL: string,
			COOLIFY_SENTRY_DSN: string,
			COOLIFY_IS_ON: string,
			COOLIFY_WHITE_LABELED: string,
			COOLIFY_WHITE_LABELED_ICON: string | null,
@@ -25,143 +36,502 @@ declare module 'fastify' {

const port = isDev ? 3001 : 3000;
const host = '0.0.0.0';

(async () => {
	const settings = await prisma.setting.findFirst()
	const fastify = Fastify({
		logger: settings?.isAPIDebuggingEnabled || false,
		trustProxy: true
	});
	const schema = {
		type: 'object',
		required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
		properties: {
			COOLIFY_APP_ID: {
				type: 'string',
			},
			COOLIFY_SECRET_KEY: {
				type: 'string',
			},
			COOLIFY_DATABASE_URL: {
				type: 'string',
				default: 'file:../db/dev.db'
			},
			COOLIFY_SENTRY_DSN: {
				type: 'string',
				default: null
			},
			COOLIFY_IS_ON: {
				type: 'string',
				default: 'docker'
			},
			COOLIFY_WHITE_LABELED: {
				type: 'string',
				default: 'false'
			},
			COOLIFY_WHITE_LABELED_ICON: {
				type: 'string',
				default: null
			},
			COOLIFY_AUTO_UPDATE: {
				type: 'string',
				default: 'false'
			},
		}
	};
	const options = {
		schema,
		dotenv: true
	};
	fastify.register(env, options);
	if (!isDev) {
		fastify.register(serve, {
			root: path.join(__dirname, './public'),
			preCompressed: true
		});
		fastify.setNotFoundHandler(async function (request, reply) {
			if (request.raw.url && request.raw.url.startsWith('/api')) {
				return reply.status(404).send({
					success: false
				});
			}
			return reply.status(200).sendFile('index.html');
		});
	}
	fastify.register(multipart, { limits: { fileSize: 100000 } });
	fastify.register(autoLoad, {
		dir: join(__dirname, 'plugins')
	});
	fastify.register(autoLoad, {
		dir: join(__dirname, 'routes')
	});
	fastify.register(cookie)
	fastify.register(cors);
	fastify.register(socketIO, {
		cors: {
			origin: isDev ? "*" : ''
		}
	})
	// To detect allowed origins
	// fastify.addHook('onRequest', async (request, reply) => {
	// 	console.log(request.headers.host)
	// 	let allowedList = ['coolify:3000'];
	// 	const { ipv4, ipv6, fqdn } = await prisma.setting.findFirst({})
	// 	ipv4 && allowedList.push(`${ipv4}:3000`);
	// 	ipv6 && allowedList.push(ipv6);
	// 	fqdn && allowedList.push(getDomain(fqdn));
	// 	isDev && allowedList.push('localhost:3000') && allowedList.push('localhost:3001') && allowedList.push('host.docker.internal:3001');
	// 	const remotes = await prisma.destinationDocker.findMany({ where: { remoteEngine: true, remoteVerified: true } })
	// 	if (remotes.length > 0) {
	// 		remotes.forEach(remote => {
	// 			allowedList.push(`${remote.remoteIpAddress}:3000`);
	// 		})
	// 	}
	// 	if (!allowedList.includes(request.headers.host)) {
	// 		// console.log('not allowed', request.headers.host)
	// 	}
	// })

	try {
		await fastify.listen({ port, host })
		await socketIOServer(fastify)
		console.log(`Coolify's API is listening on ${host}:${port}`);

		migrateServicesToNewTemplate();
		await migrateApplicationPersistentStorage();
		await initServer();

		const graceful = new Graceful({ brees: [scheduler] });
		graceful.listen();

		setInterval(async () => {
			if (!scheduler.workers.has('deployApplication')) {
				scheduler.run('deployApplication');
			}
		}, 2000)

		// autoUpdater
		setInterval(async () => {
			await autoUpdater()
		}, 60000 * 15)

		// cleanupStorage
		setInterval(async () => {
			await cleanupStorage()
		}, 60000 * 10)

		// checkProxies & checkFluentBit
		setInterval(async () => {
			await checkProxies();
			await checkFluentBit();
		}, 60000)

		// Refresh and check templates
		setInterval(async () => {
			await refreshTemplates()
		}, 60000)

		setInterval(async () => {
			await refreshTags()
		}, 60000)

		setInterval(async () => {
			await migrateServicesToNewTemplate()
		}, isDev ? 10000 : 60000)

		setInterval(async () => {
			await copySSLCertificates();
		}, 10000)

		await Promise.all([
			getTagsTemplates(),
			getArch(),
			getIPAddress(),
			configureRemoteDockers(),
		])
	} catch (error) {
		console.error(error);
		process.exit(1);
	}
})();
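The setNotFoundHandler above doubles as an SPA fallback: unknown /api routes stay JSON 404s, while everything else is handed index.html so the client-side router can take over. A minimal standalone sketch of the same pattern (the ./public layout is assumed):

import Fastify from 'fastify';
import serve from '@fastify/static';
import path from 'path';

const app = Fastify();
app.register(serve, { root: path.join(__dirname, './public') }); // assumes a built frontend in ./public
app.setNotFoundHandler(async (request, reply) => {
	if (request.raw.url?.startsWith('/api')) {
		return reply.status(404).send({ success: false }); // API misses stay machine-readable
	}
	return reply.status(200).sendFile('index.html'); // client-side router resolves the path
});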
async function getIPAddress() {
	const { publicIpv4, publicIpv6 } = await import('public-ip')
	try {
		const settings = await listSettings();
		if (!settings.ipv4) {
			const ipv4 = await publicIpv4({ timeout: 2000 })
			console.log(`Getting public IPv4 address...`);
			await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
		}

		if (!settings.ipv6) {
			const ipv6 = await publicIpv6({ timeout: 2000 })
			console.log(`Getting public IPv6 address...`);
			await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
		}

	} catch (error) { }
}
async function getTagsTemplates() {
	const { default: got } = await import('got')
	try {
		if (isDev) {
			const templates = await fs.readFile('./devTemplates.yaml', 'utf8')
			const tags = await fs.readFile('./devTags.json', 'utf8')
			await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)))
			await fs.writeFile('./tags.json', tags)
			console.log('[004] Tags and templates loaded in dev mode...')
		} else {
			const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text()
			const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text()
			await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)))
			await fs.writeFile('/app/tags.json', tags)
			console.log('[004] Tags and templates loaded...')
		}
	} catch (error) {
		console.log("Couldn't get latest templates.")
		console.log(error)
	}
}
async function initServer() {
	const appId = process.env['COOLIFY_APP_ID'];
	const settings = await prisma.setting.findUnique({ where: { id: '0' } })
	try {
		if (settings.doNotTrack === true) {
			console.log('[000] Telemetry disabled...')
		} else {
			if (settings.sentryDSN !== sentryDSN) {
				await prisma.setting.update({ where: { id: '0' }, data: { sentryDSN } })
			}
			// Initialize Sentry
			// Sentry.init({
			// 	dsn: sentryDSN,
			// 	environment: isDev ? 'development' : 'production',
			// 	release: version
			// });
			// console.log('[000] Sentry initialized...')
		}
	} catch (error) {
		console.error(error)
	}
	try {
		console.log(`[001] Initializing server...`);
		await executeCommand({ command: `docker network create --attachable coolify` });
	} catch (error) { }
	try {
		console.log(`[002] Cleanup stuck builds...`);
		const isOlder = compareVersions('3.8.1', version);
		if (isOlder === 1) {
			await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
		}
	} catch (error) { }
	try {
		console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
		await fs.rm('/tmp/build-sources', { recursive: true, force: true })
	} catch (error) {
		console.log(error)
	}
}
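The stuck-build guard relies on compare-versions ordering: compareVersions(a, b) returns 1 when a > b, 0 when they are equal, and -1 when a < b. So compareVersions('3.8.1', version) === 1 means the running version predates 3.8.1. A quick sketch:

import { compareVersions } from 'compare-versions';

console.log(compareVersions('3.8.1', '3.8.0')); //  1 → running 3.8.0 is older, queued builds get reset
console.log(compareVersions('3.8.1', '3.8.1')); //  0 → same version, nothing to do
console.log(compareVersions('3.8.1', '3.9.0')); // -1 → newer than 3.8.1, nothing to do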

async function getArch() {
	try {
		const settings = await prisma.setting.findFirst({})
		if (settings && !settings.arch) {
			console.log(`Getting architecture...`);
			await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } })
		}
	} catch (error) { }
}

async function configureRemoteDockers() {
	try {
		const remoteDocker = await prisma.destinationDocker.findMany({
			where: { remoteVerified: true, remoteEngine: true }
		});
		if (remoteDocker.length > 0) {
			console.log(`Verifying Remote Docker Engines...`);
			for (const docker of remoteDocker) {
				console.log('Verifying:', docker.remoteIpAddress)
				await verifyRemoteDockerEngineFn(docker.id);
			}
		}
	} catch (error) {
		console.log(error)
	}
}
async function autoUpdater() {
	try {
		const { default: got } = await import('got')
		const currentVersion = version;
		const { coolify } = await got.get('https://get.coollabs.io/versions.json', {
			searchParams: {
				appId: process.env['COOLIFY_APP_ID'] || undefined,
				version: currentVersion
			}
		}).json()
		const latestVersion = coolify.main.version;
		const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
		if (isUpdateAvailable === 1) {
			const activeCount = 0
			if (activeCount === 0) {
				if (!isDev) {
					const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
					if (isAutoUpdateEnabled) {
						await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` })
						await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` })
						await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` })
						await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` })
					}
				} else {
					console.log('Updating (not really in dev mode).');
				}
			}
		}
	} catch (error) {
		console.log(error)
	}
}
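The updater shells out to sed's `/pattern/c` command to pin COOLIFY_AUTO_UPDATE in the freshly generated .env. The same rewrite in plain Node, shown only to clarify what that sed expression does (a hypothetical helper, not part of the codebase):

import fs from 'fs/promises';

async function pinAutoUpdate(enabled: boolean) {
	const env = await fs.readFile('.env', 'utf8');
	const rewritten = env
		.split('\n')
		// sed's `/COOLIFY_AUTO_UPDATE=/c...` replaces every matching line wholesale:
		.map((line) => line.startsWith('COOLIFY_AUTO_UPDATE=') ? `COOLIFY_AUTO_UPDATE=${enabled}` : line)
		.join('\n');
	await fs.writeFile('.env', rewritten);
}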
async function checkFluentBit() {
	try {
		if (!isDev) {
			const engine = '/var/run/docker.sock';
			const { id } = await prisma.destinationDocker.findFirst({
				where: { engine, network: 'coolify' }
			});
			const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit', remove: true });
			if (!found) {
				await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
				await executeCommand({ command: `docker compose up -d fluent-bit` });
			}
		}
	} catch (error) {
		console.log(error)
	}
}
async function checkProxies() {
	try {
		const { default: isReachable } = await import('is-port-reachable');
		let portReachable;

		const { arch, ipv4, ipv6 } = await listSettings();

		// Coolify Proxy local
		const engine = '/var/run/docker.sock';
		const localDocker = await prisma.destinationDocker.findFirst({
			where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
		});
		if (localDocker) {
			portReachable = await isReachable(80, { host: ipv4 || ipv6 })
			if (!portReachable) {
				await startTraefikProxy(localDocker.id);
			}
		}
		// Coolify Proxy remote
		const remoteDocker = await prisma.destinationDocker.findMany({
			where: { remoteEngine: true, remoteVerified: true }
		});
		if (remoteDocker.length > 0) {
			for (const docker of remoteDocker) {
				if (docker.isCoolifyProxyUsed) {
					portReachable = await isReachable(80, { host: docker.remoteIpAddress })
					if (!portReachable) {
						await startTraefikProxy(docker.id);
					}
				}
				try {
					await createRemoteEngineConfiguration(docker.id)
				} catch (error) { }
			}
		}
		// TCP Proxies
		const databasesWithPublicPort = await prisma.database.findMany({
			where: { publicPort: { not: null } },
			include: { settings: true, destinationDocker: true }
		});
		for (const database of databasesWithPublicPort) {
			const { destinationDockerId, destinationDocker, publicPort, id } = database;
			if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
				const { privatePort } = generateDatabaseConfiguration(database, arch);
				await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
			}
		}
		const wordpressWithFtp = await prisma.wordpress.findMany({
			where: { ftpPublicPort: { not: null } },
			include: { service: { include: { destinationDocker: true } } }
		});
		for (const ftp of wordpressWithFtp) {
			const { service, ftpPublicPort } = ftp;
			const { destinationDockerId, destinationDocker, id } = service;
			if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
				await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
			}
		}

		// HTTP Proxies
		// const minioInstances = await prisma.minio.findMany({
		// 	where: { publicPort: { not: null } },
		// 	include: { service: { include: { destinationDocker: true } } }
		// });
		// for (const minio of minioInstances) {
		// 	const { service, publicPort } = minio;
		// 	const { destinationDockerId, destinationDocker, id } = service;
		// 	if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
		// 		await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
		// 	}
		// }
	} catch (error) { }
}
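checkProxies is a liveness probe rather than a config check: if port 80 stops answering on the host (or a remote engine), Traefik is simply restarted. A condensed sketch of that decision, using is-port-reachable's default export as above:

import isReachable from 'is-port-reachable';

async function ensureProxy(host: string, restart: () => Promise<void>) {
	const up = await isReachable(80, { host }); // resolves to a boolean, never throws on closed ports
	if (!up) await restart();                   // e.g. startTraefikProxy(dockerId) in the code above
}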
async function copySSLCertificates() {
	try {
		const pAll = await import('p-all');
		const actions = []
		const certificates = await prisma.certificate.findMany({ include: { team: true } })
		const teamIds = certificates.map(c => c.teamId)
		const destinations = await prisma.destinationDocker.findMany({ where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } } })
		for (const certificate of certificates) {
			const { id, key, cert } = certificate
			const decryptedKey = decrypt(key)
			await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey)
			await fs.writeFile(`/tmp/${id}-cert.pem`, cert)
			for (const destination of destinations) {
				if (destination.remoteEngine) {
					if (destination.remoteVerified) {
						const { id: dockerId, remoteIpAddress } = destination
						actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress))
					}
				} else {
					actions.push(async () => copyLocalCertificates(id))
				}
			}
		}
		await pAll.default(actions, { concurrency: 1 })
	} catch (error) {
		console.log(error)
	} finally {
		await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` })
	}
}

async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
	try {
		await executeCommand({ command: `scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/` })
		await executeCommand({ sshCommand: true, shell: true, dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
		await executeCommand({ sshCommand: true, dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
		await executeCommand({ sshCommand: true, dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
	} catch (error) {
		console.log({ error })
	}
}

async function copyLocalCertificates(id: string) {
	try {
		await executeCommand({ command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`, shell: true })
		await executeCommand({ command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
		await executeCommand({ command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
	} catch (error) {
		console.log({ error })
	}
}
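copySSLCertificates collects closures and hands them to p-all with concurrency: 1, so certificate copies run strictly one at a time even though they are queued per destination. Minimal usage sketch of that pattern:

import pAll from 'p-all';

const actions = [
	() => Promise.resolve(console.log('copy cert A')),
	() => Promise.resolve(console.log('copy cert B')),
];
await pAll(actions, { concurrency: 1 }); // runs A, then B — never in parallel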
async function cleanupStorage() {
	const destinationDockers = await prisma.destinationDocker.findMany();
	let enginesDone = new Set()
	for (const destination of destinationDockers) {
		// `continue` rather than `return`, so one already-seen engine doesn't abort the whole sweep.
		if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) continue
		if (destination.engine) enginesDone.add(destination.engine)
		if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)

		let lowDiskSpace = false;
		try {
			let stdout = null
			if (!isDev) {
				const output = await executeCommand({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`, shell: true })
				stdout = output.stdout;
			} else {
				const output = await executeCommand({ command: `df -kPT /` });
				stdout = output.stdout;
			}
			let lines = stdout.trim().split('\n');
			let header = lines[0];
			let regex =
				/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
			const boundaries = [];
			let match;

			while ((match = regex.exec(header))) {
				boundaries.push(match[0].length);
			}

			boundaries[boundaries.length - 1] = -1;
			const data = lines.slice(1).map((line) => {
				const cl = boundaries.map((boundary) => {
					const column = boundary > 0 ? line.slice(0, boundary) : line;
					line = line.slice(boundary);
					return column.trim();
				});
				return {
					capacity: Number.parseInt(cl[5], 10) / 100
				};
			});
			if (data.length > 0) {
				const { capacity } = data[0];
				if (capacity > 0.8) {
					lowDiskSpace = true;
				}
			}
		} catch (error) { }
		await cleanupDockerStorage(destination.id, lowDiskSpace, false)
	}
}
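The disk-usage check above slices df output by the widths of the header labels and then reads the Capacity column. A quick illustration against a sample `df -kPT /` output (the numbers are made up):

// Sample output of `df -kPT /`:
//   Filesystem     Type 1024-blocks     Used Available Capacity Mounted on
//   overlay        ext4    61252420 24093744  34017568      42% /
// The header regex yields one boundary per column label; the last boundary is
// forced to -1 so the final column absorbs the rest of the line. cl[5] is then
// the Capacity cell, and parseInt stops at the '%':
const capacity = Number.parseInt('42%', 10) / 100; // 0.42 — cleanup turns aggressive above 0.8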
@@ -1,43 +0,0 @@
import axios from 'axios';
import compareVersions from 'compare-versions';
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, asyncSleep, isDev, prisma, version } from '../lib/common';

(async () => {
	if (parentPort) {
		try {
			const currentVersion = version;
			const { data: versions } = await axios.get(`https://get.coollabs.io/versions.json`, {
				params: {
					appId: process.env['COOLIFY_APP_ID'] || undefined,
					version: currentVersion
				}
			})
			const latestVersion = versions['coolify'].main.version;
			const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
			if (isUpdateAvailable === 1) {
				const activeCount = 0
				if (activeCount === 0) {
					if (!isDev) {
						console.log(`Updating Coolify to ${latestVersion}.`);
						await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
						await asyncExecShell(`env | grep COOLIFY > .env`);
						await asyncExecShell(
							`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
						);
					} else {
						console.log('Updating (not really in dev mode).');
					}
				}
			}
		} catch (error) {
			console.log(error);
		} finally {
			await prisma.$disconnect();
		}
	} else process.exit(0);
})();
@@ -1,96 +0,0 @@
import { parentPort } from 'node:worker_threads';
import { prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, executeDockerCmd, listSettings } from '../lib/common';
import { checkContainer } from '../lib/docker';

(async () => {
	if (parentPort) {
		try {
			const { arch } = await listSettings();
			// Coolify Proxy local
			const engine = '/var/run/docker.sock';
			const localDocker = await prisma.destinationDocker.findFirst({
				where: { engine, network: 'coolify' }
			});
			if (localDocker && localDocker.isCoolifyProxyUsed) {
				// Remove HAProxy
				const found = await checkContainer({ dockerId: localDocker.id, container: 'coolify-haproxy' });
				if (found) {
					await executeDockerCmd({
						dockerId: localDocker.id,
						command: `docker stop -t 0 coolify-haproxy && docker rm coolify-haproxy`
					})
				}
				await startTraefikProxy(localDocker.id);
			}

			// TCP Proxies
			const databasesWithPublicPort = await prisma.database.findMany({
				where: { publicPort: { not: null } },
				include: { settings: true, destinationDocker: true }
			});
			for (const database of databasesWithPublicPort) {
				const { destinationDockerId, destinationDocker, publicPort, id } = database;
				if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
					const { privatePort } = generateDatabaseConfiguration(database, arch);
					// Remove HAProxy
					const found = await checkContainer({
						dockerId: localDocker.id, container: `haproxy-for-${publicPort}`
					});
					if (found) {
						await executeDockerCmd({
							dockerId: localDocker.id,
							command: `docker stop -t 0 haproxy-for-${publicPort} && docker rm haproxy-for-${publicPort}`
						})
					}
					await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
				}
			}
			const wordpressWithFtp = await prisma.wordpress.findMany({
				where: { ftpPublicPort: { not: null } },
				include: { service: { include: { destinationDocker: true } } }
			});
			for (const ftp of wordpressWithFtp) {
				const { service, ftpPublicPort } = ftp;
				const { destinationDockerId, destinationDocker, id } = service;
				if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
					// Remove HAProxy
					const found = await checkContainer({ dockerId: localDocker.id, container: `haproxy-for-${ftpPublicPort}` });
					if (found) {
						await executeDockerCmd({
							dockerId: localDocker.id,
							command: `docker stop -t 0 haproxy-for-${ftpPublicPort} && docker rm haproxy-for-${ftpPublicPort}`
						})
					}
					await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
				}
			}

			// HTTP Proxies
			const minioInstances = await prisma.minio.findMany({
				where: { publicPort: { not: null } },
				include: { service: { include: { destinationDocker: true } } }
			});
			for (const minio of minioInstances) {
				const { service, publicPort } = minio;
				const { destinationDockerId, destinationDocker, id } = service;
				if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
					// Remove HAProxy
					const found = await checkContainer({ dockerId: localDocker.id, container: `${id}-${publicPort}` });
					if (found) {
						await executeDockerCmd({
							dockerId: localDocker.id,
							command: `docker stop -t 0 ${id}-${publicPort} && docker rm ${id}-${publicPort}`
						})
					}
					await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
				}
			}

		} catch (error) {

		} finally {
			await prisma.$disconnect();
		}

	} else process.exit(0);
})();
@@ -1,19 +0,0 @@
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, isDev, prisma } from '../lib/common';

(async () => {
	if (parentPort) {
		if (!isDev) {
			try {
				const { stdout } = await asyncExecShell(`ps -ef | grep /app/prisma-engines/query-engine | grep -v grep | wc -l | xargs`)
				if (stdout.trim() != null && stdout.trim() != '' && Number(stdout.trim()) > 1) {
					await asyncExecShell(`killall -q -e /app/prisma-engines/query-engine -o 10m`)
				}
			} catch (error) {
				console.log(error);
			} finally {
				await prisma.$disconnect();
			}
		}
	} else process.exit(0);
})();
@@ -1,60 +0,0 @@
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, version } from '../lib/common';

(async () => {
	if (parentPort) {
		const destinationDockers = await prisma.destinationDocker.findMany();
		let enginesDone = new Set()
		for (const destination of destinationDockers) {
			if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return
			if (destination.engine) enginesDone.add(destination.engine)
			if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)

			let lowDiskSpace = false;
			try {
				let stdout = null
				if (!isDev) {
					const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` })
					stdout = output.stdout;
				} else {
					const output = await asyncExecShell(`df -kPT /`);
					stdout = output.stdout;
				}
				let lines = stdout.trim().split('\n');
				let header = lines[0];
				let regex =
					/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
				const boundaries = [];
				let match;

				while ((match = regex.exec(header))) {
					boundaries.push(match[0].length);
				}

				boundaries[boundaries.length - 1] = -1;
				const data = lines.slice(1).map((line) => {
					const cl = boundaries.map((boundary) => {
						const column = boundary > 0 ? line.slice(0, boundary) : line;
						line = line.slice(boundary);
						return column.trim();
					});
					return {
						capacity: Number.parseInt(cl[5], 10) / 100
					};
				});
				if (data.length > 0) {
					const { capacity } = data[0];
					if (capacity > 0.8) {
						lowDiskSpace = true;
					}
				}
			} catch (error) {
				console.log(error);
			}
			await cleanupDockerStorage(destination.id, lowDiskSpace, false)
		}
		await prisma.$disconnect();
	} else process.exit(0);
})();
@@ -3,155 +3,302 @@ import crypto from 'crypto';
import fs from 'fs/promises';
import yaml from 'js-yaml';

import { copyBaseConfigurationFiles, makeLabelForSimpleDockerfile, makeLabelForStandaloneApplication, saveBuildLog, saveDockerRegistryCredentials, setDefaultConfiguration } from '../lib/buildPacks/common';
import { createDirectories, decrypt, defaultComposeConfiguration, getDomain, prisma, decryptApplication, isDev, pushToRegistry, executeCommand } from '../lib/common';
import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks';

(async () => {
	if (parentPort) {
		parentPort.on('message', async (message) => {
			if (message === 'error') throw new Error('oops');
			if (message === 'cancel') {
				parentPort.postMessage('cancelled');
				await prisma.$disconnect()
				process.exit(0);
			}
		});
		const pThrottle = await import('p-throttle')
		const throttle = pThrottle.default({
			limit: 1,
			interval: 2000
		});

		const th = throttle(async () => {
			try {
				const queuedBuilds = await prisma.build.findMany({ where: { status: { in: ['queued', 'running'] } }, orderBy: { createdAt: 'asc' } });
				const { concurrentBuilds } = await prisma.setting.findFirst({})
				if (queuedBuilds.length > 0) {
					parentPort.postMessage({ deploying: true });
					const concurrency = concurrentBuilds;
					const pAll = await import('p-all');
					const actions = []

					for (const queueBuild of queuedBuilds) {
						actions.push(async () => {
							let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { dockerRegistry: true, destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })

							let { id: buildId, type, gitSourceId, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild, sourceRepository = null } = queueBuild
							application = decryptApplication(application)

							if (!gitSourceId && application.simpleDockerfile) {
								const {
									id: applicationId,
									destinationDocker,
									destinationDockerId,
									secrets,
									port,
									persistentStorage,
									exposePort,
									simpleDockerfile,
									dockerRegistry
								} = application
								const { workdir } = await createDirectories({ repository: applicationId, buildId });
								try {
									if (queueBuild.status === 'running') {
										await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
									}
									const volumes =
										persistentStorage?.map((storage) => {
											if (storage.oldPath) {
												return `${applicationId}${storage.path.replace(/\//gi, '-').replace('-app', '')}:${storage.path}`;
											}
											return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
										}) || [];

									if (destinationDockerId) {
										await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
										try {
											const { stdout: containers } = await executeCommand({
												dockerId: destinationDockerId,
												command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}`
											})
											if (containers) {
												const containerArray = containers.split('\n');
												if (containerArray.length > 0) {
													for (const container of containerArray) {
														await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
														await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
													}
												}
											}
										} catch (error) {
											//
										}
										const envs = [
											`PORT=${port}`
										];
										if (secrets.length > 0) {
											secrets.forEach((secret) => {
												if (pullmergeRequestId) {
													const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
													if (isSecretFound.length > 0) {
														envs.push(`${secret.name}=${isSecretFound[0].value}`);
													} else {
														envs.push(`${secret.name}=${secret.value}`);
													}
												} else {
													if (!secret.isPRMRSecret) {
														envs.push(`${secret.name}=${secret.value}`);
													}
												}
											});
										}
										await fs.writeFile(`${workdir}/.env`, envs.join('\n'));

										let envFound = false;
										try {
											envFound = !!(await fs.stat(`${workdir}/.env`));
										} catch (error) {
											//
										}

										await fs.writeFile(`${workdir}/Dockerfile`, simpleDockerfile);
										if (dockerRegistry) {
											const { url, username, password } = dockerRegistry
											await saveDockerRegistryCredentials({ url, username, password, workdir })
										}

										const labels = makeLabelForSimpleDockerfile({
											applicationId,
											type,
											port: exposePort ? `${exposePort}:${port}` : port,
										});
										try {
											const composeVolumes = volumes.map((volume) => {
												return {
													[`${volume.split(':')[0]}`]: {
														name: volume.split(':')[0]
													}
												};
											});
											const composeFile = {
												version: '3.8',
												services: {
													[applicationId]: {
														build: {
															context: workdir,
														},
														image: `${applicationId}:${buildId}`,
														container_name: applicationId,
														volumes,
														labels,
														env_file: envFound ? [`${workdir}/.env`] : [],
														depends_on: [],
														expose: [port],
														...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
														...defaultComposeConfiguration(destinationDocker.network),
													}
												},
												networks: {
													[destinationDocker.network]: {
														external: true
													}
												},
												volumes: Object.assign({}, ...composeVolumes)
											};
											await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
											await executeCommand({ debug: true, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
											await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
										} catch (error) {
											await saveBuildLog({ line: error, buildId, applicationId });
											const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
											if (foundBuild) {
												await prisma.build.update({
													where: { id: buildId },
													data: {
														status: 'failed'
													}
												});
											}
											throw new Error(error);
										}
									}
								} catch (error) {
									const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
									if (foundBuild) {
										await prisma.build.update({
											where: { id: buildId },
											data: {
												status: 'failed'
											}
										});
									}
									if (error !== 1) {
										await saveBuildLog({ line: error, buildId, applicationId: application.id });
									}
									if (error instanceof Error) {
										await saveBuildLog({ line: error.message, buildId, applicationId: application.id });
									}
									await fs.rm(workdir, { recursive: true, force: true });
									return;
								}
								try {
									if (application.dockerRegistryImageName) {
										const customTag = application.dockerRegistryImageName.split(':')[1] || buildId;
										const imageName = application.dockerRegistryImageName.split(':')[0];
										await saveBuildLog({ line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`, buildId, applicationId: application.id });
										await pushToRegistry(application, workdir, buildId, imageName, customTag)
										await saveBuildLog({ line: "Success", buildId, applicationId: application.id });
									}
								} catch (error) {
									if (error.stdout) {
										await saveBuildLog({ line: error.stdout, buildId, applicationId });
									}
									if (error.stderr) {
										await saveBuildLog({ line: error.stderr, buildId, applicationId });
									}
								} finally {
									await fs.rm(workdir, { recursive: true, force: true });
									await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
								}
								return;
							}

							const originalApplicationId = application.id
							const {
								id: applicationId,
								name,
								destinationDocker,
								destinationDockerId,
								gitSource,
								configHash,
								fqdn,
								projectId,
								secrets,
								phpModules,
								settings,
								persistentStorage,
								pythonWSGI,
								pythonModule,
								pythonVariable,
								denoOptions,
								exposePort,
								baseImage,
								baseBuildImage,
								deploymentType,
								gitCommitHash,
								dockerRegistry
							} = application
							let {
								branch,
								repository,
								buildPack,
								port,
								installCommand,
								buildCommand,
								startCommand,
								baseDirectory,
								publishDirectory,
								dockerFileLocation,
								dockerComposeFileLocation,
								dockerComposeConfiguration,
								denoMainFile
							} = application

							let imageId = applicationId;
							let domain = getDomain(fqdn);

							let location = null;

							let tag = null;
							let customTag = null;
							let imageName = null;

							let imageFoundLocally = false;
							let imageFoundRemotely = false;

							if (pullmergeRequestId) {
								const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
								if (previewApplications.length > 0) {
									previewApplicationId = previewApplications[0].id
								}
								// Previews, we need to get the source branch and set subdomain
								branch = sourceBranch;
								domain = `${pullmergeRequestId}.${domain}`;
								imageId = `${applicationId}-${pullmergeRequestId}`;
								repository = sourceRepository || repository;
							}

							const { workdir, repodir } = await createDirectories({ repository, buildId });
							try {
								if (queueBuild.status === 'running') {
									await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
								}

								const currentHash = crypto
									//@ts-ignore
									.createHash('sha256')
									.update(
										JSON.stringify({
											pythonWSGI,
											pythonModule,
											pythonVariable,
											deploymentType,
											denoOptions,
											baseImage,
											baseBuildImage,
											buildPack,
											port,
											exposePort,
@@ -165,42 +312,134 @@ import * as buildpacks from '../lib/buildPacks';
										})
									)
									.digest('hex');

								const { debug } = settings;
								if (!debug) {
									await saveBuildLog({
										line: `Debug logging is disabled. Enable it above if necessary!`,
										buildId,
										applicationId
									});
								}
								const volumes =
									persistentStorage?.map((storage) => {
										if (storage.oldPath) {
											return `${applicationId}${storage.path.replace(/\//gi, '-').replace('-app', '')}:${storage.path}`;
										}
										return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
									}) || [];

								try {
									dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration)
								} catch (error) { }
								let deployNeeded = true;
								let destinationType;

								if (destinationDockerId) {
									destinationType = 'docker';
								}
								if (destinationType === 'docker') {
									await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });

									const configuration = await setDefaultConfiguration(application);

									buildPack = configuration.buildPack;
									port = configuration.port;
									installCommand = configuration.installCommand;
									startCommand = configuration.startCommand;
									buildCommand = configuration.buildCommand;
									publishDirectory = configuration.publishDirectory;
									baseDirectory = configuration.baseDirectory || '';
									dockerFileLocation = configuration.dockerFileLocation;
									dockerComposeFileLocation = configuration.dockerComposeFileLocation;
									denoMainFile = configuration.denoMainFile;
									const commit = await importers[gitSource.type]({
										applicationId,
										domain,
										debug,
										workdir,
										repodir,
										githubAppId: gitSource.githubApp?.id,
										gitlabAppId: gitSource.gitlabApp?.id,
										customPort: gitSource.customPort,
										gitCommitHash,
										configuration,
										repository,
										branch,
										buildId,
										apiUrl: gitSource.apiUrl,
										htmlUrl: gitSource.htmlUrl,
										projectId,
										deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
										privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null,
										forPublic: gitSource.forPublic
									});
									if (!commit) {
										throw new Error('No commit found?');
									}
									tag = commit.slice(0, 7);
									if (pullmergeRequestId) {
										tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
									}
									if (application.dockerRegistryImageName) {
										imageName = application.dockerRegistryImageName.split(':')[0]
										customTag = application.dockerRegistryImageName.split(':')[1] || tag
									} else {
										customTag = tag
										imageName = applicationId;
									}

									if (pullmergeRequestId) {
										customTag = `${customTag}-${pullmergeRequestId}`;
									}

									try {
										await prisma.build.update({ where: { id: buildId }, data: { commit } });
									} catch (err) { }

									if (!pullmergeRequestId) {
										if (configHash !== currentHash) {
											deployNeeded = true;
											if (configHash) {
												await saveBuildLog({ line: 'Configuration changed', buildId, applicationId });
											}
										} else {
											deployNeeded = false;
										}
									} else {
										deployNeeded = true;
									}

									try {
										await executeCommand({
											dockerId: destinationDocker.id,
											command: `docker image inspect ${applicationId}:${tag}`
										})
										imageFoundLocally = true;
									} catch (error) {
										//
									}
									if (dockerRegistry) {
										const { url, username, password } = dockerRegistry
										location = await saveDockerRegistryCredentials({ url, username, password, workdir })
									}

									try {
										await executeCommand({
											dockerId: destinationDocker.id,
											command: `docker ${location ? `--config ${location}` : ''} pull ${imageName}:${customTag}`
										})
										imageFoundRemotely = true;
									} catch (error) {
										//
									}
									let imageFound = `${applicationId}:${tag}`
									if (imageFoundRemotely) {
										imageFound = `${imageName}:${customTag}`
									}
									await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
									const labels = makeLabelForStandaloneApplication({
										applicationId,
										fqdn,
										name,
										type,
										pullmergeRequestId,
@@ -208,151 +447,260 @@ import * as buildpacks from '../lib/buildPacks';
										buildPack,
										repository,
										branch,
										projectId,
										port: exposePort ? `${exposePort}:${port}` : port,
										commit,
										installCommand,
										buildCommand,
										startCommand,
										baseDirectory,
										publishDirectory
									});
									if (forceRebuild) deployNeeded = true
									if ((!imageFoundLocally && !imageFoundRemotely) || deployNeeded) {
										if (buildpacks[buildPack])
											await buildpacks[buildPack]({
												dockerId: destinationDocker.id,
												network: destinationDocker.network,
												buildId,
												applicationId,
												domain,
												name,
												type,
												volumes,
												labels,
												pullmergeRequestId,
												buildPack,
												repository,
												branch,
												projectId,
												publishDirectory,
												debug,
												commit,
												tag,
												workdir,
												port: exposePort ? `${exposePort}:${port}` : port,
												installCommand,
												buildCommand,
												startCommand,
												baseDirectory,
												secrets,
												phpModules,
												pythonWSGI,
												pythonModule,
												pythonVariable,
												dockerFileLocation,
												dockerComposeConfiguration,
												dockerComposeFileLocation,
												denoMainFile,
												denoOptions,
												baseImage,
												baseBuildImage,
												deploymentType,
											});
										else {
											await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
											throw new Error(`Build pack ${buildPack} not found.`);
										}
									} else {
										if (imageFoundRemotely || deployNeeded) {
											await saveBuildLog({ line: `Container image ${imageFound} found in Docker Registry - reusing it`, buildId, applicationId });
										} else {
											if (imageFoundLocally || deployNeeded) {
												await saveBuildLog({ line: `Container image ${imageFound} found locally - reusing it`, buildId, applicationId });
											}
										}
									}

									if (buildPack === 'compose') {
										try {
											const { stdout: containers } = await executeCommand({
												dockerId: destinationDockerId,
												command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}`
											})
											if (containers) {
												const containerArray = containers.split('\n');
												if (containerArray.length > 0) {
													for (const container of containerArray) {
														await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
|
||||
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
try {
|
||||
console.log({ debug })
|
||||
await executeCommand({ debug, buildId, applicationId, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
|
||||
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
|
||||
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
|
||||
await prisma.application.update({
|
||||
where: { id: applicationId },
|
||||
data: { configHash: currentHash }
|
||||
});
|
||||
} catch (error) {
|
||||
await saveBuildLog({ line: error, buildId, applicationId });
|
||||
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
|
||||
if (foundBuild) {
|
||||
await prisma.build.update({
|
||||
where: { id: buildId },
|
||||
data: {
|
||||
status: 'failed'
|
||||
}
|
||||
});
|
||||
}
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
} else {
|
||||
try {
|
||||
const { stdout: containers } = await executeCommand({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}`
|
||||
})
|
||||
if (containers) {
|
||||
const containerArray = containers.split('\n');
|
||||
if (containerArray.length > 0) {
|
||||
for (const container of containerArray) {
|
||||
await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
|
||||
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
const envs = [
|
||||
`PORT=${port}`
|
||||
];
|
||||
if (secrets.length > 0) {
|
||||
secrets.forEach((secret) => {
|
||||
if (pullmergeRequestId) {
|
||||
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
|
||||
if (isSecretFound.length > 0) {
|
||||
envs.push(`${secret.name}=${isSecretFound[0].value}`);
|
||||
} else {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
} else {
|
||||
if (!secret.isPRMRSecret) {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
|
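The PR/MR branch above lets preview-deployment secrets take precedence over regular ones with the same name. A minimal standalone sketch of that precedence rule (the Secret shape and the resolveEnvs name are illustrative, not taken from the repo):

// Sketch only: assumes a Secret shape of { name, value, isPRMRSecret }.
interface Secret { name: string; value: string; isPRMRSecret: boolean; }
function resolveEnvs(secrets: Secret[], pullmergeRequestId?: string): string[] {
    const resolved = new Map<string, string>();
    for (const secret of secrets) {
        if (pullmergeRequestId) {
            // Prefer a PR/MR override for this name when one exists.
            const override = secrets.find((s) => s.name === secret.name && s.isPRMRSecret);
            resolved.set(secret.name, (override ?? secret).value);
        } else if (!secret.isPRMRSecret) {
            resolved.set(secret.name, secret.value);
        }
    }
    // Deduplicated via Map for clarity; the code above writes one line per secret instead,
    // relying on the later assignment winning when the .env file is parsed.
    return [...resolved.entries()].map(([name, value]) => `${name}=${value}`);
}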
if (dockerRegistry) {
const { url, username, password } = dockerRegistry
await saveDockerRegistryCredentials({ url, username, password, workdir })
}

let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: imageFound,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeCommand({ debug, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}

if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
} catch (error) {
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
if (error instanceof Error) {
await saveBuildLog({ line: error.message, buildId, applicationId: application.id });
}
await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
if (application.dockerRegistryImageName && (!imageFoundRemotely || forceRebuild)) {
await saveBuildLog({ line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`, buildId, applicationId: application.id });
await pushToRegistry(application, workdir, tag, imageName, customTag)
await saveBuildLog({ line: "Success", buildId, applicationId: application.id });
}
} catch (error) {
if (error.stdout) {
await saveBuildLog({ line: error.stdout, buildId, applicationId });
}
if (error.stderr) {
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}

} finally {
await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
}
});
await saveBuildLog({ line: error, buildId, applicationId });
} finally {
await prisma.$disconnect();
}
});
await prisma.$disconnect();
await pAll.default(actions, { concurrency })
}
} catch (error) {
console.log(error)
}
});
})
while (true) {
await th()
}
} else process.exit(0);
})();

530
apps/api/src/lib.ts
Normal file
@@ -0,0 +1,530 @@
import cuid from "cuid";
import { decrypt, encrypt, fixType, generatePassword, generateToken, prisma } from "./lib/common";
import { getTemplates } from "./lib/services";

export async function migrateApplicationPersistentStorage() {
const settings = await prisma.setting.findFirst()
if (settings) {
const { id: settingsId, applicationStoragePathMigrationFinished } = settings
try {
if (!applicationStoragePathMigrationFinished) {
const applications = await prisma.application.findMany({ include: { persistentStorage: true } });
for (const application of applications) {
if (application.persistentStorage && application.persistentStorage.length > 0 && application?.buildPack !== 'docker') {
for (const storage of application.persistentStorage) {
let { id, path } = storage
if (!path.startsWith('/app')) {
path = `/app${path}`
await prisma.applicationPersistentStorage.update({ where: { id }, data: { path, oldPath: true } })
}
}
}
}
}
} catch (error) {
console.log(error)
} finally {
await prisma.setting.update({ where: { id: settingsId }, data: { applicationStoragePathMigrationFinished: true } })
}
}
}
export async function migrateServicesToNewTemplate() {
// This function migrates old hardcoded services to the new template-based services
try {
let templates = await getTemplates()
const services: any = await prisma.service.findMany({
include: {
destinationDocker: true,
persistentStorage: true,
serviceSecret: true,
serviceSetting: true,
minio: true,
plausibleAnalytics: true,
vscodeserver: true,
wordpress: true,
ghost: true,
meiliSearch: true,
umami: true,
hasura: true,
fider: true,
moodle: true,
appwrite: true,
glitchTip: true,
searxng: true,
weblate: true,
taiga: true,
}
})
for (const service of services) {
try {
const { id } = service
if (!service.type) {
continue;
}
let template = templates.find(t => fixType(t.type) === fixType(service.type));
if (template) {
template = JSON.parse(JSON.stringify(template).replaceAll('$$id', service.id))
if (service.type === 'plausibleanalytics' && service.plausibleAnalytics) await plausibleAnalytics(service, template)
if (service.type === 'fider' && service.fider) await fider(service, template)
if (service.type === 'minio' && service.minio) await minio(service, template)
if (service.type === 'vscodeserver' && service.vscodeserver) await vscodeserver(service, template)
if (service.type === 'wordpress' && service.wordpress) await wordpress(service, template)
if (service.type === 'ghost' && service.ghost) await ghost(service, template)
if (service.type === 'meilisearch' && service.meiliSearch) await meilisearch(service, template)
if (service.type === 'umami' && service.umami) await umami(service, template)
if (service.type === 'hasura' && service.hasura) await hasura(service, template)
if (service.type === 'glitchTip' && service.glitchTip) await glitchtip(service, template)
if (service.type === 'searxng' && service.searxng) await searxng(service, template)
if (service.type === 'weblate' && service.weblate) await weblate(service, template)
if (service.type === 'appwrite' && service.appwrite) await appwrite(service, template)

try {
await createVolumes(service, template);
} catch (error) {
console.log(error)
}
if (template.variables) {
if (template.variables.length > 0) {
for (const variable of template.variables) {
const { defaultValue } = variable;
// e.g. '$$generate_password(32)' -> captures 32 as the requested length
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec(defaultValue)?.[1]) || undefined
if (variable.defaultValue.startsWith('$$generate_password')) {
variable.value = generatePassword({ length });
} else if (variable.defaultValue.startsWith('$$generate_hex')) {
variable.value = generatePassword({ length, isHex: true });
} else if (variable.defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else if (variable.defaultValue.startsWith('$$generate_token')) {
variable.value = generateToken()
} else {
variable.value = variable.defaultValue || '';
}
}
}
for (const variable of template.variables) {
if (variable.id.startsWith('$$secret_')) {
const found = await prisma.serviceSecret.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) {
await prisma.serviceSecret.create({
data: { name: variable.name, value: encrypt(variable.value) || '', service: { connect: { id } } }
})
}

}
if (variable.id.startsWith('$$config_')) {
const found = await prisma.serviceSetting.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) {
await prisma.serviceSetting.create({
data: { name: variable.name, value: variable.value.toString(), variableName: variable.id, service: { connect: { id } } }
})
}
}
}
}
for (const s of Object.keys(template.services)) {
if (service.type === 'plausibleanalytics') {
continue;
}
if (template.services[s].volumes) {
for (const volume of template.services[s].volumes) {
const [volumeName, path] = volume.split(':')
if (!volumeName.startsWith('/')) {
const found = await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: id } })
if (!found) {
await prisma.servicePersistentStorage.create({
data: { volumeName, path, containerId: s, predefined: true, service: { connect: { id } } }
});
}
}
}
}
}
await prisma.service.update({ where: { id }, data: { templateVersion: template.templateVersion } })
}
} catch (error) {
console.log(error)
}
}
} catch (error) {
console.log(error)

}
}
async function appwrite(service: any, template: any) {
const { opensslKeyV1, executorSecret, mariadbHost, mariadbPort, mariadbUser, mariadbPassword, mariadbRootUserPassword, mariadbDatabase } = service.appwrite

const secrets = [
`_APP_EXECUTOR_SECRET@@@${executorSecret}`,
`_APP_OPENSSL_KEY_V1@@@${opensslKeyV1}`,
`_APP_DB_PASS@@@${mariadbPassword}`,
`_APP_DB_ROOT_PASS@@@${mariadbRootUserPassword}`,
]

const settings = [
`_APP_DB_HOST@@@${mariadbHost}`,
`_APP_DB_PORT@@@${mariadbPort}`,
`_APP_DB_USER@@@${mariadbUser}`,
`_APP_DB_SCHEMA@@@${mariadbDatabase}`,
]
await migrateSecrets(secrets, service);
await migrateSettings(settings, service, template);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { appwrite: { disconnect: true } } })
}
async function weblate(service: any, template: any) {
const { adminPassword, postgresqlUser, postgresqlPassword, postgresqlDatabase } = service.weblate

const secrets = [
`WEBLATE_ADMIN_PASSWORD@@@${adminPassword}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
]

const settings = [
`WEBLATE_SITE_DOMAIN@@@$$generate_domain`,
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DATABASE@@@${postgresqlDatabase}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
`POSTGRES_HOST@@@$$id-postgres`,
`POSTGRES_PORT@@@5432`,
`REDIS_HOST@@@$$id-redis`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { weblate: { disconnect: true } } })
}
async function searxng(service: any, template: any) {
const { secretKey, redisPassword } = service.searxng

const secrets = [
`SECRET_KEY@@@${secretKey}`,
`REDIS_PASSWORD@@@${redisPassword}`,
]

const settings = [
`SEARXNG_BASE_URL@@@$$generate_fqdn`
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { searxng: { disconnect: true } } })
}
async function glitchtip(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, secretKeyBase, defaultEmail, defaultUsername, defaultPassword, defaultEmailFrom, emailSmtpHost, emailSmtpPort, emailSmtpUser, emailSmtpPassword, emailSmtpUseTls, emailSmtpUseSsl, emailBackend, mailgunApiKey, sendgridApiKey, enableOpenUserRegistration } = service.glitchTip
const { id } = service

const secrets = [
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
`SECRET_KEY@@@${secretKeyBase}`,
`MAILGUN_API_KEY@@@${mailgunApiKey}`,
`SENDGRID_API_KEY@@@${sendgridApiKey}`,
`DJANGO_SUPERUSER_PASSWORD@@@${defaultPassword}`,
emailSmtpUser && emailSmtpPassword && emailSmtpHost && emailSmtpPort && `EMAIL_URL@@@${encrypt(`smtp://${emailSmtpUser}:${decrypt(emailSmtpPassword)}@${emailSmtpHost}:${emailSmtpPort}`) || ''}`,
`DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
`REDIS_URL@@@${encrypt(`redis://${id}-redis:6379`)}`
]
const settings = [
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
`DEFAULT_FROM_EMAIL@@@${defaultEmailFrom}`,
`EMAIL_USE_TLS@@@${emailSmtpUseTls}`,
`EMAIL_USE_SSL@@@${emailSmtpUseSsl}`,
`EMAIL_BACKEND@@@${emailBackend}`,
`ENABLE_OPEN_USER_REGISTRATION@@@${enableOpenUserRegistration}`,
`DJANGO_SUPERUSER_EMAIL@@@${defaultEmail}`,
`DJANGO_SUPERUSER_USERNAME@@@${defaultUsername}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

await prisma.service.update({ where: { id: service.id }, data: { type: 'glitchtip' } })
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { glitchTip: { disconnect: true } } })
}
async function hasura(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, graphQLAdminPassword } = service.hasura
const { id } = service

const secrets = [
`HASURA_GRAPHQL_ADMIN_SECRET@@@${graphQLAdminPassword}`,
`HASURA_GRAPHQL_METADATA_DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
]
const settings = [
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { hasura: { disconnect: true } } })
}
async function umami(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, umamiAdminPassword, hashSalt } = service.umami
const { id } = service
const secrets = [
`HASH_SALT@@@${hashSalt}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
`ADMIN_PASSWORD@@@${umamiAdminPassword}`,
`DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
]
const settings = [
`DATABASE_TYPE@@@postgresql`,
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

await prisma.service.update({ where: { id: service.id }, data: { type: "umami-postgresql" } })

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { umami: { disconnect: true } } })
}
async function meilisearch(service: any, template: any) {
const { masterKey } = service.meiliSearch

const secrets = [
`MEILI_MASTER_KEY@@@${masterKey}`,
]

// await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { meiliSearch: { disconnect: true } } })
}
async function ghost(service: any, template: any) {
const { defaultEmail, defaultPassword, mariadbUser, mariadbPassword, mariadbRootUser, mariadbRootUserPassword, mariadbDatabase } = service.ghost
const { fqdn } = service

const isHttps = fqdn.startsWith('https://');

const secrets = [
`GHOST_PASSWORD@@@${defaultPassword}`,
`MARIADB_PASSWORD@@@${mariadbPassword}`,
`MARIADB_ROOT_PASSWORD@@@${mariadbRootUserPassword}`,
`GHOST_DATABASE_PASSWORD@@@${mariadbPassword}`,
]
const settings = [
`GHOST_EMAIL@@@${defaultEmail}`,
`GHOST_DATABASE_HOST@@@${service.id}-mariadb`,
`GHOST_DATABASE_USER@@@${mariadbUser}`,
`GHOST_DATABASE_NAME@@@${mariadbDatabase}`,
`GHOST_DATABASE_PORT_NUMBER@@@3306`,
`MARIADB_USER@@@${mariadbUser}`,
`MARIADB_DATABASE@@@${mariadbDatabase}`,
`MARIADB_ROOT_USER@@@${mariadbRootUser}`,
`GHOST_HOST@@@$$generate_domain`,
`url@@@$$generate_fqdn`,
`GHOST_ENABLE_HTTPS@@@${isHttps ? 'yes' : 'no'}`
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

await prisma.service.update({ where: { id: service.id }, data: { type: "ghost-mariadb" } })

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { ghost: { disconnect: true } } })
}
async function wordpress(service: any, template: any) {
const { extraConfig, tablePrefix, ownMysql, mysqlHost, mysqlPort, mysqlUser, mysqlPassword, mysqlRootUser, mysqlRootUserPassword, mysqlDatabase, ftpEnabled, ftpUser, ftpPassword, ftpPublicPort, ftpHostKey, ftpHostKeyPrivate } = service.wordpress

let settings = []
let secrets = []
if (ownMysql) {
secrets = [
`WORDPRESS_DB_PASSWORD@@@${mysqlPassword}`,
ftpPassword && `COOLIFY_FTP_PASSWORD@@@${ftpPassword}`,
ftpHostKeyPrivate && `COOLIFY_FTP_HOST_KEY_PRIVATE@@@${ftpHostKeyPrivate}`,
ftpHostKey && `COOLIFY_FTP_HOST_KEY@@@${ftpHostKey}`,
]
settings = [
`WORDPRESS_CONFIG_EXTRA@@@${extraConfig}`,
`WORDPRESS_DB_HOST@@@${mysqlHost}`,
`WORDPRESS_DB_PORT@@@${mysqlPort}`,
`WORDPRESS_DB_USER@@@${mysqlUser}`,
`WORDPRESS_DB_NAME@@@${mysqlDatabase}`,
]
} else {
secrets = [
`MYSQL_ROOT_PASSWORD@@@${mysqlRootUserPassword}`,
`MYSQL_PASSWORD@@@${mysqlPassword}`,
ftpPassword && `COOLIFY_FTP_PASSWORD@@@${ftpPassword}`,
ftpHostKeyPrivate && `COOLIFY_FTP_HOST_KEY_PRIVATE@@@${ftpHostKeyPrivate}`,
ftpHostKey && `COOLIFY_FTP_HOST_KEY@@@${ftpHostKey}`,
]
settings = [
`MYSQL_ROOT_USER@@@${mysqlRootUser}`,
`MYSQL_USER@@@${mysqlUser}`,
`MYSQL_DATABASE@@@${mysqlDatabase}`,
`MYSQL_HOST@@@${service.id}-mysql`,
`MYSQL_PORT@@@${mysqlPort}`,
`WORDPRESS_CONFIG_EXTRA@@@${extraConfig}`,
`WORDPRESS_TABLE_PREFIX@@@${tablePrefix}`,
`WORDPRESS_DB_HOST@@@${service.id}-mysql`,
`COOLIFY_OWN_DB@@@${ownMysql}`,
`COOLIFY_FTP_ENABLED@@@${ftpEnabled}`,
`COOLIFY_FTP_USER@@@${ftpUser}`,
`COOLIFY_FTP_PUBLIC_PORT@@@${ftpPublicPort}`,
]
}

await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
if (ownMysql) {
await prisma.service.update({ where: { id: service.id }, data: { type: "wordpress-only" } })
}
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { wordpress: { disconnect: true } } })
}
async function vscodeserver(service: any, template: any) {
const { password } = service.vscodeserver

const secrets = [
`PASSWORD@@@${password}`,
]
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { vscodeserver: { disconnect: true } } })
}
async function minio(service: any, template: any) {
const { rootUser, rootUserPassword, apiFqdn } = service.minio
const secrets = [
`MINIO_ROOT_PASSWORD@@@${rootUserPassword}`,
]
const settings = [
`MINIO_ROOT_USER@@@${rootUser}`,
`MINIO_SERVER_URL@@@${apiFqdn}`,
`MINIO_BROWSER_REDIRECT_URL@@@$$generate_fqdn`,
`MINIO_DOMAIN@@@$$generate_domain`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { minio: { disconnect: true } } })
}
async function fider(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, jwtSecret, emailNoreply, emailMailgunApiKey, emailMailgunDomain, emailMailgunRegion, emailSmtpHost, emailSmtpPort, emailSmtpUser, emailSmtpPassword, emailSmtpEnableStartTls } = service.fider
const { id } = service
const secrets = [
`JWT_SECRET@@@${jwtSecret}`,
emailMailgunApiKey && `EMAIL_MAILGUN_API@@@${emailMailgunApiKey}`,
emailSmtpPassword && `EMAIL_SMTP_PASSWORD@@@${emailSmtpPassword}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
`DATABASE_URL@@@${encrypt(`postgresql://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}?sslmode=disable`)}`
]
const settings = [
`BASE_URL@@@$$generate_fqdn`,
`EMAIL_NOREPLY@@@${emailNoreply || 'noreply@example.com'}`,
`EMAIL_MAILGUN_DOMAIN@@@${emailMailgunDomain || ''}`,
`EMAIL_MAILGUN_REGION@@@${emailMailgunRegion || ''}`,
`EMAIL_SMTP_HOST@@@${emailSmtpHost || ''}`,
`EMAIL_SMTP_PORT@@@${emailSmtpPort || 587}`,
`EMAIL_SMTP_USER@@@${emailSmtpUser || ''}`,
`EMAIL_SMTP_PASSWORD@@@${emailSmtpPassword || ''}`,
`EMAIL_SMTP_ENABLE_STARTTLS@@@${emailSmtpEnableStartTls || 'false'}`,
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { fider: { disconnect: true } } })

}
async function plausibleAnalytics(service: any, template: any) {
const { email, username, password, postgresqlUser, postgresqlPassword, postgresqlDatabase, secretKeyBase, scriptName } = service.plausibleAnalytics;
const { id } = service

const settings = [
`BASE_URL@@@$$generate_fqdn`,
`ADMIN_USER_EMAIL@@@${email}`,
`ADMIN_USER_NAME@@@${username}`,
`DISABLE_AUTH@@@false`,
`DISABLE_REGISTRATION@@@true`,
`POSTGRESQL_USERNAME@@@${postgresqlUser}`,
`POSTGRESQL_DATABASE@@@${postgresqlDatabase}`,
`SCRIPT_NAME@@@${scriptName}`,
]
const secrets = [
`ADMIN_USER_PWD@@@${password}`,
`SECRET_KEY_BASE@@@${secretKeyBase}`,
`POSTGRESQL_PASSWORD@@@${postgresqlPassword}`,
`DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);

// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } })
}
async function migrateSettings(settings: any[], service: any, template: any) {
for (const setting of settings) {
try {
if (!setting) continue;
let [name, value] = setting.split('@@@')
// 'minio' keeps the original setting name for the lookup/create below;
// only the template variable resolution uses the renamed key.
let minio = name
if (name === 'MINIO_SERVER_URL') {
name = 'coolify_fqdn_minio_console'
}
if (!value || value === 'null') {
continue;
}
let variableName = template.variables.find((v: any) => v.name === name)?.id
if (!variableName) {
variableName = `$$config_${name.toLowerCase()}`
}
// console.log('Migrating setting', name, value, 'for service', service.id, ', service name:', service.name, 'variableName: ', variableName)

await prisma.serviceSetting.findFirst({ where: { name: minio, serviceId: service.id } }) || await prisma.serviceSetting.create({ data: { name: minio, value, variableName, service: { connect: { id: service.id } } } })
} catch (error) {
console.log(error)
}
}
}
async function migrateSecrets(secrets: any[], service: any) {
for (const secret of secrets) {
try {
if (!secret) continue;
let [name, value] = secret.split('@@@')
if (!value || value === 'null') {
continue
}
// console.log('Migrating secret', name, value, 'for service', service.id, ', service name:', service.name)
await prisma.serviceSecret.findFirst({ where: { name, serviceId: service.id } }) || await prisma.serviceSecret.create({ data: { name, value, service: { connect: { id: service.id } } } })
} catch (error) {
console.log(error)
}
}
}
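Both migrateSettings and migrateSecrets consume flat 'NAME@@@VALUE' strings and skip falsy entries or literal 'null' values, which is why callers can place conditional expressions straight into the arrays. A tiny illustration (values are hypothetical):

// Sketch of the NAME@@@VALUE convention consumed above.
const entry = 'POSTGRES_PASSWORD@@@s3cret';
const [name, value] = entry.split('@@@'); // name === 'POSTGRES_PASSWORD', value === 's3cret'
// A conditional entry such as `smtpUser && `EMAIL_USER@@@${smtpUser}`` evaluates to a
// falsy value when unset, and the `if (!secret) continue;` guard then drops it.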
async function createVolumes(service: any, template: any) {
const volumes = [];
for (const s of Object.keys(template.services)) {
if (template.services[s].volumes && template.services[s].volumes.length > 0) {
for (const volume of template.services[s].volumes) {
let volumeName = volume.split(':')[0]
const volumePath = volume.split(':')[1]
let volumeService = s
if (service.type === 'plausibleanalytics' && service.plausibleAnalytics?.id) {
let volumeId = volumeName.split('-')[0]
volumeName = volumeName.replace(volumeId, service.plausibleAnalytics.id)
}
volumes.push(`${volumeName}@@@${volumePath}@@@${volumeService}`)
}
}
}
for (const volume of volumes) {
const [volumeName, path, containerId] = volume.split('@@@')
// console.log('Creating volume', volumeName, path, containerId, 'for service', service.id, ', service name:', service.name)
await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: service.id } }) || await prisma.servicePersistentStorage.create({ data: { volumeName, path, containerId, predefined: true, service: { connect: { id: service.id } } } })
}
}
@@ -1,4 +1,4 @@
import { base64Encode, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { base64Encode, decrypt, encrypt, executeCommand, generateTimestamp, getDomain, isARM, isDev, prisma, version } from "../common";
import { promises as fs } from 'fs';
import { day } from "../dayjs";

@@ -52,6 +52,14 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
{
value: 'webdevops/apache:alpine',
label: 'webdevops/apache:alpine'
},
{
value: 'nginx:alpine',
label: 'nginx:alpine'
},
{
value: 'httpd:alpine',
label: 'httpd:alpine (Apache)'
}
];
const rustVersions = [
@@ -89,6 +97,22 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
}
];
const phpVersions = [
{
value: 'webdevops/php-apache:8.2',
label: 'webdevops/php-apache:8.2'
},
{
value: 'webdevops/php-nginx:8.2',
label: 'webdevops/php-nginx:8.2'
},
{
value: 'webdevops/php-apache:8.1',
label: 'webdevops/php-apache:8.1'
},
{
value: 'webdevops/php-nginx:8.1',
label: 'webdevops/php-nginx:8.1'
},
{
value: 'webdevops/php-apache:8.0',
label: 'webdevops/php-apache:8.0'
@@ -145,6 +169,22 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
value: 'webdevops/php-nginx:5.6',
label: 'webdevops/php-nginx:5.6'
},
{
value: 'webdevops/php-apache:8.2-alpine',
label: 'webdevops/php-apache:8.2-alpine'
},
{
value: 'webdevops/php-nginx:8.2-alpine',
label: 'webdevops/php-nginx:8.2-alpine'
},
{
value: 'webdevops/php-apache:8.1-alpine',
label: 'webdevops/php-apache:8.1-alpine'
},
{
value: 'webdevops/php-nginx:8.1-alpine',
label: 'webdevops/php-nginx:8.1-alpine'
},
{
value: 'webdevops/php-apache:8.0-alpine',
label: 'webdevops/php-apache:8.0-alpine'
@@ -182,8 +222,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
label: 'webdevops/php-apache:7.1-alpine'
},
{
value: 'webdevops/php-nginx:7.1-alpine',
label: 'webdevops/php-nginx:7.1-alpine'
value: 'php:8.1-fpm',
label: 'php:8.1-fpm'
},
{
value: 'php:8.0-fpm',
label: 'php:8.0-fpm'
},
{
value: 'php:8.1-fpm-alpine',
label: 'php:8.1-fpm-alpine'
},
{
value: 'php:8.0-fpm-alpine',
label: 'php:8.0-fpm-alpine'
}
];
const pythonVersions = [
@@ -252,6 +304,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
label: 'python:3.7-slim-bullseye'
}
];
const herokuVersions = [
{
value: 'heroku/builder:22',
label: 'heroku/builder:22'
},
{
value: 'heroku/buildpacks:20',
label: 'heroku/buildpacks:20'
},
{
value: 'heroku/builder-classic:22',
label: 'heroku/builder-classic:22'
},
]
let payload: any = {
baseImage: null,
baseBuildImage: null,
@@ -260,8 +326,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
};
if (nodeBased.includes(buildPack)) {
if (deploymentType === 'static') {
payload.baseImage = 'webdevops/nginx:alpine';
payload.baseImages = staticVersions;
payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
payload.baseImages = isARM(process.arch) ? staticVersions.filter((version) => !version.value.includes('webdevops')) : staticVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
} else {
@@ -272,8 +338,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
}
}
if (staticApps.includes(buildPack)) {
payload.baseImage = 'webdevops/nginx:alpine';
payload.baseImages = staticVersions;
payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
payload.baseImages = isARM(process.arch) ? staticVersions.filter((version) => !version.value.includes('webdevops')) : staticVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
}
@@ -291,14 +357,19 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
payload.baseImage = 'denoland/deno:latest';
}
if (buildPack === 'php') {
payload.baseImage = 'webdevops/php-apache:8.0-alpine';
payload.baseImages = phpVersions;
payload.baseImage = isARM(process.arch) ? 'php:8.1-fpm-alpine' : 'webdevops/php-apache:8.2-alpine';
payload.baseImages = isARM(process.arch) ? phpVersions.filter((version) => !version.value.includes('webdevops')) : phpVersions
}
if (buildPack === 'laravel') {
payload.baseImage = 'webdevops/php-apache:8.0-alpine';
payload.baseImage = isARM(process.arch) ? 'php:8.1-fpm-alpine' : 'webdevops/php-apache:8.2-alpine';
payload.baseImages = isARM(process.arch) ? phpVersions.filter((version) => !version.value.includes('webdevops')) : phpVersions
payload.baseBuildImage = 'node:18';
payload.baseBuildImages = nodeVersions;
}
if (buildPack === 'heroku') {
payload.baseImage = 'heroku/buildpacks:20';
payload.baseImages = herokuVersions;
}
return payload;
}
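Every ARM branch in setDefaultBaseImage follows the same two-step pattern: pick an official image as the default and filter the webdevops entries out of the selectable list, presumably because those images are not published for ARM. A condensed sketch (the semantics of isARM are assumed from its usage here):

// Sketch of the ARM fallback pattern repeated above.
const onArm = isARM(process.arch); // presumably true for 'arm64'/'arm'
payload.baseImage = onArm ? 'php:8.1-fpm-alpine' : 'webdevops/php-apache:8.2-alpine';
payload.baseImages = onArm
    ? phpVersions.filter((version) => !version.value.includes('webdevops'))
    : phpVersions;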

@@ -312,6 +383,7 @@ export const setDefaultConfiguration = async (data: any) => {
publishDirectory,
baseDirectory,
dockerFileLocation,
dockerComposeFileLocation,
denoMainFile
} = data;
//@ts-ignore
@@ -333,7 +405,7 @@ export const setDefaultConfiguration = async (data: any) => {
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
if (baseDirectory.endsWith('/') && baseDirectory !== '/') baseDirectory = baseDirectory.slice(0, -1);
}
if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
@@ -341,6 +413,12 @@ export const setDefaultConfiguration = async (data: any) => {
} else {
dockerFileLocation = '/Dockerfile';
}
if (dockerComposeFileLocation) {
if (!dockerComposeFileLocation.startsWith('/')) dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
if (dockerComposeFileLocation.endsWith('/')) dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
} else {
dockerComposeFileLocation = '/Dockerfile';
}
if (!denoMainFile) {
denoMainFile = 'main.ts';
}
@@ -354,6 +432,7 @@ export const setDefaultConfiguration = async (data: any) => {
publishDirectory,
baseDirectory,
dockerFileLocation,
dockerComposeFileLocation,
denoMainFile
};
};
@@ -410,17 +489,40 @@ export const saveBuildLog = async ({
buildId: string;
applicationId: string;
}): Promise<any> => {
if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
const { default: got } = await import('got')
if (typeof line === 'object' && line) {
if (line.shortMessage) {
line = line.shortMessage + '\n' + line.stderr;
} else {
line = JSON.stringify(line);
}
}
if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `[${generateTimestamp()}] ${line}`;
if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
const fluentBitUrl = isDev ? process.env.COOLIFY_CONTAINER_DEV === 'true' ? 'http://coolify-fluentbit:24224' : 'http://localhost:24224' : 'http://coolify-fluentbit:24224';

if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
console.debug(`[${applicationId}] ${addTimestamp}`);
}
try {
return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
json: {
line: encrypt(line)
}
})
} catch (error) {
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
}

};

export async function copyBaseConfigurationFiles(
@@ -492,8 +594,8 @@ export async function copyBaseConfigurationFiles(
`
);
}
// TODO: Add more configuration files for other buildpacks, like apache2, etc.
} catch (error) {
console.log(error);
throw new Error(error);
}
}
@@ -506,7 +608,29 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
);
}

export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
if (!username || !password) {
return null
}

let decryptedPassword = decrypt(password);
const location = `${workdir}/.docker`;

try {
await fs.mkdir(`${workdir}/.docker`);
} catch (error) {
console.log(error);
}
const payload = JSON.stringify({
"auths": {
[url]: {
"auth": Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
}
}
})
await fs.writeFile(`${location}/config.json`, payload)
return location
}
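saveDockerRegistryCredentials writes a per-build config.json so the registry credentials never touch the global Docker config; the returned directory is handed to the CLI via --config, exactly as the pull and build call sites in this diff do:

// Usage as seen at the call sites above.
const location = await saveDockerRegistryCredentials({ url, username, password, workdir });
const pull = `docker ${location ? `--config ${location}` : ''} pull ${imageName}:${customTag}`;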
export async function buildImage({
applicationId,
tag,
@@ -515,104 +639,40 @@ export async function buildImage({
dockerId,
isCache = false,
debug = false,
dockerFileLocation = '/Dockerfile'
dockerFileLocation = '/Dockerfile',
commit
}) {
if (isCache) {
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
} else {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
}
if (debug) {
await saveBuildLog({ line: `\n###############\nIMPORTANT: Due to some issues while implementing the Remote Docker Engine, build logs are not streamed at the moment - but will be soon! You will see the full build log when the build is finished!\n###############`, buildId, applicationId });
}
if (!debug && isCache) {
await saveBuildLog({
line: `Debug turned off. To see more details, allow it in the configuration.`,
buildId,
applicationId
});
await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
}
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
const { stderr } = await executeDockerCmd({ dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} ${workdir}` })
if (debug) {
const array = stderr.split('\n')
for (const line of array) {
if (line !== '\n') {
await saveBuildLog({
line: `${line.replace('\n', '')}`,
buildId,
applicationId
});
}
}

let location = null

const { dockerRegistry } = await prisma.application.findUnique({ where: { id: applicationId }, select: { dockerRegistry: true } })
if (dockerRegistry) {
const { url, username, password } = dockerRegistry
location = await saveDockerRegistryCredentials({ url, username, password, workdir })
}

await executeCommand({ stream: true, debug, buildId, applicationId, dockerId, command: `docker ${location ? `--config ${location}` : ''} build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })

// await new Promise((resolve, reject) => {
// const command = spawn(`docker`, ['build', '-f', `${workdir}${dockerFile}`, '-t', `${cache}`,`${workdir}`], {
// env: {
// DOCKER_HOST: 'ssh://root@95.217.178.202',
// DOCKER_BUILDKIT: '1'
// }
// });
// command.stdout.on('data', function (data) {
// console.log('stdout: ' + data);
// });
// command.stderr.on('data', function (data) {
// console.log('stderr: ' + data);
// });
// command.on('error', function (error) {
// console.log(error)
// reject(error)
// })
// command.on('exit', function (code) {
// console.log('exit code: ' + code);
// resolve(code)
// });
// })

// console.log({ stdout, stderr })
// const stream = await docker.engine.buildImage(
// { src: ['.'], context: workdir },
// {
// dockerfile: isCache ? `${dockerFileLocation}-cache` : dockerFileLocation,
// t: `${applicationId}:${tag}${isCache ? '-cache' : ''}`
// }
// );
// await streamEvents({ stream, docker, buildId, applicationId, debug });
if (isCache) {
await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId });
} else {
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
const { status } = await prisma.build.findUnique({ where: { id: buildId } })
if (status === 'canceled') {
throw new Error('Canceled.')
}
}

export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
await new Promise((resolve, reject) => {
docker.engine.modem.followProgress(stream, onFinished, onProgress);
function onFinished(err, res) {
if (err) reject(err);
resolve(res);
}
async function onProgress(event) {
if (event.error) {
reject(event.error);
} else if (event.stream) {
if (event.stream !== '\n') {
if (debug)
await saveBuildLog({
line: `${event.stream.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
});
export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.applicationId=${applicationId}`,
`coolify.type=standalone-application`
];
}

export function makeLabelForStandaloneApplication({
applicationId,
fqdn,
@@ -639,7 +699,9 @@ export function makeLabelForStandaloneApplication({
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.applicationId=${applicationId}`,
`coolify.type=standalone-application`,
`coolify.name=${name}`,
`coolify.configuration=${base64Encode(
JSON.stringify({
applicationId,
@@ -673,8 +735,6 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
secrets,
pullmergeRequestId
} = data;

const isPnpm = checkPnpm(installCommand, buildCommand);
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
@@ -684,7 +744,10 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
@@ -698,11 +761,11 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
if (installCommand) {
Dockerfile.push(`COPY .${baseDirectory || ''}/package.json ./`);
Dockerfile.push(`RUN ${installCommand}`);
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
// Dockerfile.push(`ARG CACHEBUST=1`);
Dockerfile.push(`RUN ${buildCommand}`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
@@ -719,7 +782,10 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
@@ -759,4 +825,4 @@ export async function buildCacheImageWithCargo(data, imageForBuild) {
Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
}
}

126
apps/api/src/lib/buildPacks/compose.ts
Normal file
@@ -0,0 +1,126 @@
import { promises as fs } from 'fs';
import { defaultComposeConfiguration, executeCommand } from '../common';
import { saveBuildLog } from './common';
import yaml from 'js-yaml';

export default async function (data) {
let {
applicationId,
debug,
buildId,
dockerId,
network,
volumes,
labels,
workdir,
baseDirectory,
secrets,
pullmergeRequestId,
dockerComposeConfiguration,
dockerComposeFileLocation
} = data;
const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
const dockerComposeYaml = yaml.load(dockerComposeRaw);
if (!dockerComposeYaml.services) {
throw 'No Services found in docker-compose file.';
}
const envs = [];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
// Keyed by volume name (volume name -> compose volume config).
const composeVolumes = {};
if (volumes.length > 0) {
for (const volume of volumes) {
let [v, path] = volume.split(':');
composeVolumes[v] = {
name: v
};
}
}

let networks = {};
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
value['container_name'] = `${applicationId}-${key}`;
value['env_file'] = envFound ? [`${workdir}/.env`] : [];
value['labels'] = labels;
// TODO: If we support separated volume for each service, we need to add it here
if (value['volumes']?.length > 0) {
value['volumes'] = value['volumes'].map((volume) => {
let [v, path, permission] = volume.split(':');
if (!path) {
path = v;
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
} else {
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
}
composeVolumes[v] = {
name: v
};
return `${v}:${path}${permission ? ':' + permission : ''}`;
});
}
if (volumes.length > 0) {
for (const volume of volumes) {
value['volumes'].push(volume);
}
}
if (dockerComposeConfiguration[key]?.port) {
value['expose'] = [dockerComposeConfiguration[key].port];
}
if (value['networks']?.length > 0) {
value['networks'].forEach((network) => {
networks[network] = {
name: network
};
});
}
value['networks'] = [...(value['networks'] || ''), network];
dockerComposeYaml.services[key] = {
...dockerComposeYaml.services[key],
restart: defaultComposeConfiguration(network).restart,
deploy: defaultComposeConfiguration(network).deploy
};
}
if (Object.keys(composeVolumes).length > 0) {
dockerComposeYaml['volumes'] = { ...composeVolumes };
}
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });
await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
await executeCommand({
debug,
buildId,
applicationId,
dockerId,
command: `docker compose --project-directory ${workdir} pull`
});
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
await executeCommand({
debug,
buildId,
applicationId,
dockerId,
command: `docker compose --project-directory ${workdir} build --progress plain`
});
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
}
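The volume rewrite in this buildpack namespaces every named volume with the application id (slashes become dashes, dots are dropped), so two applications on the same server cannot collide on volume names. With a hypothetical applicationId of 'clabc123':

// 'db-data:/var/lib/postgresql/data' -> 'clabc123db-data:/var/lib/postgresql/data'
// './uploads:/uploads'               -> 'clabc123-uploads:/uploads'
// '/cache' (no target given)         -> named 'clabc123-cache', mounted at '/cache'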
||||
@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
	secrets.forEach((secret) => {
		if (secret.isBuildSecret) {
			if (pullmergeRequestId) {
				if (secret.isPRMRSecret) {
					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
					if (isSecretFound.length > 0) {
						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
					} else {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {
@@ -46,7 +49,7 @@ const createDockerfile = async (data, image): Promise<void> => {
	Dockerfile.push(`RUN deno cache ${denoMainFile}`);
	Dockerfile.push(`ENV NO_COLOR true`);
	Dockerfile.push(`EXPOSE ${port}`);
	Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
	Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
@@ -13,40 +13,35 @@ export default async function (data) {
		pullmergeRequestId,
		dockerFileLocation
	} = data
	try {
		const file = `${workdir}${dockerFileLocation}`;
		let dockerFileOut = `${workdir}`;
		if (baseDirectory) {
			dockerFileOut = `${workdir}${baseDirectory}`;
			workdir = `${workdir}${baseDirectory}`;
			const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
			data.workdir = `${workdir}${baseDirectory}`;
		const DockerfileRaw = await fs.readFile(`${file}`, 'utf8')
		const Dockerfile: Array<string> = DockerfileRaw
			.toString()
			.trim()
			.split('\n');
		Dockerfile.forEach((line, index) => {
			if (line.startsWith('FROM')) {
				Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
			}
		const Dockerfile: Array<string> = (await fs.readFile(`${file}`, 'utf8'))
			.toString()
			.trim()
			.split('\n');
		Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
		if (secrets.length > 0) {
			secrets.forEach((secret) => {
				if (secret.isBuildSecret) {
					if (
						(pullmergeRequestId && secret.isPRMRSecret) ||
						(!pullmergeRequestId && !secret.isPRMRSecret)
					) {
						Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);

						Dockerfile.forEach((line, index) => {
							if (line.startsWith('FROM')) {
								Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
							}
						});
					}
				});
		if (secrets.length > 0) {
			secrets.forEach((secret) => {
				if (secret.isBuildSecret) {
					if (
						(pullmergeRequestId && secret.isPRMRSecret) ||
						(!pullmergeRequestId && !secret.isPRMRSecret)
					) {
						Dockerfile.forEach((line, index) => {
							if (line.startsWith('FROM')) {
								Dockerfile.splice(index + 1, 0, `ARG ${secret.name}=${secret.value}`);
							}
						});
					}
			});
		}

		await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n'));
		await buildImage(data);
	} catch (error) {
		throw error;
	}
	});
}

	await fs.writeFile(`${workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
	await buildImage(data);
}
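The hunk above replaces a single Dockerfile.unshift(...) with a splice after every FROM line. The motivation is a Dockerfile rule that is not visible in the diff itself: an ARG declared before a FROM does not carry into the build stage, and multi-stage builds reset ARGs at each FROM, so the value has to be re-declared per stage. A small sketch of the same idea that also avoids splicing the array while iterating it (illustrative, not the repo's code):

	// Re-declare a build ARG after every FROM so each stage can see it.
	// Building a fresh array sidesteps the index shifting that splice-inside-forEach causes.
	function injectArgAfterEachFrom(lines: string[], name: string, value: string): string[] {
		const out: string[] = [];
		for (const line of lines) {
			out.push(line);
			if (line.startsWith('FROM')) out.push(`ARG ${name}=${value}`);
		}
		return out;
	}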
@@ -1,28 +1,16 @@
import { executeDockerCmd, prisma } from "../common"
import { executeCommand } from "../common"
import { saveBuildLog } from "./common";

export default async function (data: any): Promise<void> {
	const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
	try {
		const { buildId, applicationId, tag, dockerId, debug, workdir } = data
		await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
		const { stdout } = await executeDockerCmd({
	await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
	await executeCommand({
		buildId,
		debug,
		dockerId,
		command: `pack build -p ${workdir} ${applicationId}:${tag} --builder heroku/buildpacks:20`
		command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
	})

	if (debug) {
		const array = stdout.split('\n')
		for (const line of array) {
			if (line !== '\n') {
				await saveBuildLog({
					line: `${line.replace('\n', '')}`,
					buildId,
					applicationId
				});
			}
		}
	}
	await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
	} catch (error) {
		throw error;
	}
@@ -16,6 +16,7 @@ import python from './python';
import deno from './deno';
import laravel from './laravel';
import heroku from './heroku';
import compose from './compose'

export {
	node,
@@ -35,5 +36,6 @@ export {
	python,
	deno,
	laravel,
	heroku
	heroku,
	compose
};
@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
	secrets.forEach((secret) => {
		if (secret.isBuildSecret) {
			if (pullmergeRequestId) {
				if (secret.isPRMRSecret) {
					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
					if (isSecretFound.length > 0) {
						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
					} else {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {

@@ -23,7 +23,10 @@ const createDockerfile = async (data, image): Promise<void> => {
	secrets.forEach((secret) => {
		if (secret.isBuildSecret) {
			if (pullmergeRequestId) {
				if (secret.isPRMRSecret) {
					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
					if (isSecretFound.length > 0) {
						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
					} else {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
	secrets.forEach((secret) => {
		if (secret.isBuildSecret) {
			if (pullmergeRequestId) {
				if (secret.isPRMRSecret) {
					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
					if (isSecretFound.length > 0) {
						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
					} else {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {

@@ -16,7 +16,10 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
	secrets.forEach((secret) => {
		if (secret.isBuildSecret) {
			if (pullmergeRequestId) {
				if (secret.isPRMRSecret) {
					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
					if (isSecretFound.length > 0) {
						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
					} else {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {

@@ -21,7 +21,10 @@ const createDockerfile = async (data, image): Promise<void> => {
	secrets.forEach((secret) => {
		if (secret.isBuildSecret) {
			if (pullmergeRequestId) {
				if (secret.isPRMRSecret) {
					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
					if (isSecretFound.length > 0) {
						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
					} else {
						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
					}
				} else {
@@ -1,6 +1,6 @@
import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
import { asyncExecShell } from '../common';
import { executeCommand } from '../common';
import { buildCacheImageWithCargo, buildImage } from './common';

const createDockerfile = async (data, image, name): Promise<void> => {
@@ -28,7 +28,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
export default async function (data) {
	try {
		const { workdir, baseImage, baseBuildImage } = data;
		const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
		const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
		const parsedToml: any = TOML.parse(cargoToml);
		const name = parsedToml.package.name;
		await buildCacheImageWithCargo(data, baseBuildImage);
@@ -18,13 +18,20 @@ const createDockerfile = async (data, image): Promise<void> => {
	const Dockerfile: Array<string> = [];

	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	if (baseImage?.includes('httpd')) {
		Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
	} else {
		Dockerfile.push('WORKDIR /app');
	}
	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
	if (secrets.length > 0) {
		secrets.forEach((secret) => {
			if (secret.isBuildSecret) {
				if (pullmergeRequestId) {
					if (secret.isPRMRSecret) {
						const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
						if (isSecretFound.length > 0) {
							Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
						} else {
							Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
						}
					} else {
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
import { executeDockerCmd } from './common';
import { executeCommand } from './common';

export function formatLabelsOnDocker(data) {
	return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
@@ -13,45 +13,56 @@ export function formatLabelsOnDocker(data) {
		return container
	})
}
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<boolean> {
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
	let containerFound = false;
	try {
		const { stdout } = await executeDockerCmd({
		const { stdout } = await executeCommand({
			dockerId,
			command:
				`docker inspect --format '{{json .State}}' ${container}`
		});

		containerFound = true
		const parsedStdout = JSON.parse(stdout);
		const status = parsedStdout.Status;
		const isRunning = status === 'running';
		const isRestarting = status === 'restarting'
		const isExited = status === 'exited'
		if (status === 'created') {
			await executeDockerCmd({
			await executeCommand({
				dockerId,
				command:
					`docker rm ${container}`
			});
		}
		if (remove && status === 'exited') {
			await executeDockerCmd({
			await executeCommand({
				dockerId,
				command:
					`docker rm ${container}`
			});
		}
		if (isRunning) {
			containerFound = true;
		}

		return {
			found: containerFound,
			status: {
				isRunning,
				isRestarting,
				isExited
			}
		};
	} catch (err) {
		// Container not found
	}
	return containerFound;
	return {
		found: false
	};

}

export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
	let isExited = false;
	try {
		const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
		const { stdout } = await executeCommand({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
		if (stdout.trim() === 'exited') {
			isExited = true;
		}
@@ -70,14 +81,15 @@ export async function removeContainer({
	dockerId: string;
}): Promise<void> {
	try {
		const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })

		const { stdout } = await executeCommand({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
		if (JSON.parse(stdout).Running) {
			await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
			await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
			await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
			await executeCommand({ dockerId, command: `docker rm ${id}` })
		}
		if (JSON.parse(stdout).Status === 'exited') {
			await executeCommand({ dockerId, command: `docker rm ${id}` })
		}
	} catch (error) {
		console.log(error);
		throw error;
	}
}
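Since checkContainer now resolves to { found, status } instead of a bare boolean, every call site changes shape. A sketch of an adapted caller (the function name and its use are illustrative, not from this diff):

	import { checkContainer } from './lib/docker';

	// Sketch only: `found` alone no longer implies "running"; an exited
	// container is still found, so callers must also look at `status`.
	async function isServiceUp(dockerId: string, container: string): Promise<boolean> {
		const { found, status } = await checkContainer({ dockerId, container });
		return found && !!status?.isRunning;
	}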
@@ -1,7 +1,7 @@

import jsonwebtoken from 'jsonwebtoken';
import { saveBuildLog } from '../buildPacks/common';
import { asyncExecShell, decrypt, prisma } from '../common';
import { decrypt, executeCommand, prisma } from '../common';

export default async function ({
	applicationId,
@@ -9,55 +9,86 @@ export default async function ({
	githubAppId,
	repository,
	apiUrl,
	gitCommitHash,
	htmlUrl,
	branch,
	buildId,
	customPort
	customPort,
	forPublic
}: {
	applicationId: string;
	workdir: string;
	githubAppId: string;
	repository: string;
	apiUrl: string;
	gitCommitHash?: string;
	htmlUrl: string;
	branch: string;
	buildId: string;
	customPort: number;
	forPublic?: boolean;
}): Promise<string> {
	const { default: got } = await import('got')
	const url = htmlUrl.replace('https://', '').replace('http://', '');
	await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
	if (forPublic) {
		await saveBuildLog({
			line: `Cloning ${repository}:${branch}...`,
			buildId,
			applicationId
		});
		if (gitCommitHash) {
			await saveBuildLog({
				line: `Checking out ${gitCommitHash} commit...`,
				buildId,
				applicationId
			});
		}
		await executeCommand({
			command:
				`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
			shell: true
		});

	const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
	if (body.privateKey) body.privateKey = decrypt(body.privateKey);
	const { privateKey, appId, installationId } = body
	} else {
		const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
		if (body.privateKey) body.privateKey = decrypt(body.privateKey);
		const { privateKey, appId, installationId } = body
		const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');

	const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');

	const payload = {
		iat: Math.round(new Date().getTime() / 1000),
		exp: Math.round(new Date().getTime() / 1000 + 60),
		iss: appId
	};
	const jwtToken = jsonwebtoken.sign(payload, githubPrivateKey, {
		algorithm: 'RS256'
	});
	const { token } = await got
		.post(`${apiUrl}/app/installations/${installationId}/access_tokens`, {
			headers: {
				Authorization: `Bearer ${jwtToken}`,
				Accept: 'application/vnd.github.machine-man-preview+json'
			}
		})
		.json();
	await saveBuildLog({
		line: `Cloning ${repository}:${branch} branch.`,
		buildId,
		applicationId
	});
	await asyncExecShell(
		`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
	);
	const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
		const payload = {
			iat: Math.round(new Date().getTime() / 1000),
			exp: Math.round(new Date().getTime() / 1000 + 60),
			iss: appId
		};
		const jwtToken = jsonwebtoken.sign(payload, githubPrivateKey, {
			algorithm: 'RS256'
		});
		const { token } = await got
			.post(`${apiUrl}/app/installations/${installationId}/access_tokens`, {
				headers: {
					Authorization: `Bearer ${jwtToken}`,
					Accept: 'application/vnd.github.machine-man-preview+json'
				}
			})
			.json();
		await saveBuildLog({
			line: `Cloning ${repository}:${branch}...`,
			buildId,
			applicationId
		});
		if (gitCommitHash) {
			await saveBuildLog({
				line: `Checking out ${gitCommitHash} commit...`,
				buildId,
				applicationId
			});
		}
		await executeCommand({
			command:
				`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
			shell: true
		});
	}
	const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
	return commit.replace('\n', '');
}
@@ -1,16 +1,18 @@
import { saveBuildLog } from "../buildPacks/common";
import { asyncExecShell } from "../common";
import { executeCommand } from "../common";

export default async function ({
	applicationId,
	workdir,
	repodir,
	htmlUrl,
	gitCommitHash,
	repository,
	branch,
	buildId,
	privateSshKey,
	customPort
	customPort,
	forPublic
}: {
	applicationId: string;
	workdir: string;
@@ -19,23 +21,43 @@ export default async function ({
	branch: string;
	buildId: string;
	repodir: string;
	gitCommitHash: string;
	privateSshKey: string;
	customPort: number;
	forPublic: boolean;
}): Promise<string> {
	const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
	await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
	await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
	await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
	if (!forPublic) {
		await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
		await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
	}

	await saveBuildLog({
		line: `Cloning ${repository}:${branch} branch.`,
		line: `Cloning ${repository}:${branch}...`,
		buildId,
		applicationId
	});
	if (gitCommitHash) {
		await saveBuildLog({
			line: `Checking out ${gitCommitHash} commit...`,
			buildId,
			applicationId
		});
	}
	if (forPublic) {
		await executeCommand({
			command:
				`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
			}
		);
	} else {
		await executeCommand({
			command:
				`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
			}
		);
	}

	await asyncExecShell(
		`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
	);
	const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
	const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
	return commit.replace('\n', '');
}
@@ -2,55 +2,28 @@ import Bree from 'bree';
import path from 'path';
import Cabin from 'cabin';
import TSBree from '@breejs/ts-worker';
import { isDev } from './common';

export const isDev = process.env.NODE_ENV === 'development';

Bree.extend(TSBree);

const options: any = {
	defaultExtension: 'js',
	logger: false,
	workerMessageHandler: async ({ name, message }) => {
		if (name === 'deployApplication') {
			if (message.pending === 0 && message.size === 0) {
				if (message.caller === 'autoUpdater') {
					if (!scheduler.workers.has('autoUpdater')) {
						await scheduler.stop('deployApplication');
						await scheduler.run('autoUpdater')
					}
				}
				if (message.caller === 'cleanupStorage') {
					if (!scheduler.workers.has('cleanupStorage')) {
						await scheduler.stop('deployApplication');
						await scheduler.run('cleanupStorage')
					}
				}

			}
		}
	},
	// logger: false,
	// workerMessageHandler: async ({ name, message }) => {
	// 	if (name === 'deployApplication' && message?.deploying) {
	// 		if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
	// 			scheduler.workers.get('deployApplication').postMessage('cancel')
	// 		}
	// 	}
	// },
	jobs: [
		{
			name: 'deployApplication'
		},
		{
			name: 'cleanupStorage',
		},
		{
			name: 'cleanupPrismaEngines',
			interval: '1m'
		},
		{
			name: 'checkProxies',
			interval: '10s'
		},
		{
			name: 'autoUpdater',
		}
		{ name: 'deployApplication' },
	],
};
if (isDev) options.root = path.join(__dirname, '../jobs');

export const scheduler = new Bree(options);
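For orientation, the Bree methods this file leans on (run, stop, and the workers map) all operate on jobs by name, and each job name resolves to a worker script under Bree's root directory. A minimal hedged sketch of that surface; the job name mirrors the diff, the control flow is illustrative:

	import Bree from 'bree';

	// Sketch only: assumes a deployApplication.js worker exists under the jobs root.
	async function demo() {
		const bree = new Bree({ jobs: [{ name: 'deployApplication' }] });
		await bree.start();                       // schedule all configured jobs
		await bree.run('deployApplication');      // trigger one job immediately
		if (bree.workers.has('deployApplication')) {
			await bree.stop('deployApplication'); // stop a running job by name
		}
	}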
51  apps/api/src/lib/services.ts  Normal file
@@ -0,0 +1,51 @@
import { isARM, isDev } from "./common";
import fs from 'fs/promises';
export async function getTemplates() {
	const templatePath = isDev ? './templates.json' : '/app/templates.json';
	const open = await fs.open(templatePath, 'r');
	try {
		let data = await open.readFile({ encoding: 'utf-8' });
		let jsonData = JSON.parse(data)
		if (isARM(process.arch)) {
			jsonData = jsonData.filter(d => d.arch !== 'amd64')
		}
		return jsonData;
	} catch (error) {
		return []
	} finally {
		await open?.close()
	}
}
const compareSemanticVersions = (a: string, b: string) => {
	const a1 = a.split('.');
	const b1 = b.split('.');
	const len = Math.min(a1.length, b1.length);
	for (let i = 0; i < len; i++) {
		const a2 = +a1[i] || 0;
		const b2 = +b1[i] || 0;
		if (a2 !== b2) {
			return a2 > b2 ? 1 : -1;
		}
	}
	return b1.length - a1.length;
};
export async function getTags(type: string) {

	try {
		if (type) {
			const tagsPath = isDev ? './tags.json' : '/app/tags.json';
			const data = await fs.readFile(tagsPath, 'utf8')
			let tags = JSON.parse(data)
			if (tags) {
				tags = tags.find((tag: any) => tag.name.includes(type))
				tags.tags = tags.tags.sort(compareSemanticVersions).reverse();
				return tags
			}
		}
	} catch (error) {
		return []

	}

}
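The numeric, segment-wise comparison matters because a plain lexicographic sort mis-orders versions once any segment reaches two digits. A small illustration of the sort used by getTags (the tag values are made up):

	const tags = ['1.2.0', '1.10.0', '1.9.3'];
	const newestFirst = [...tags].sort(compareSemanticVersions).reverse();
	// newestFirst -> ['1.10.0', '1.9.3', '1.2.0']
	// a plain tags.sort() would have ranked '1.10.0' below '1.9.3'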
39  apps/api/src/lib/services/common.ts  Normal file
@@ -0,0 +1,39 @@

import { decrypt, prisma } from '../common';

export async function removeService({ id }: { id: string }): Promise<void> {
	await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
	await prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
	await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
	await prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
	await prisma.fider.deleteMany({ where: { serviceId: id } });
	await prisma.ghost.deleteMany({ where: { serviceId: id } });
	await prisma.umami.deleteMany({ where: { serviceId: id } });
	await prisma.hasura.deleteMany({ where: { serviceId: id } });
	await prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
	await prisma.minio.deleteMany({ where: { serviceId: id } });
	await prisma.vscodeserver.deleteMany({ where: { serviceId: id } });
	await prisma.wordpress.deleteMany({ where: { serviceId: id } });
	await prisma.glitchTip.deleteMany({ where: { serviceId: id } });
	await prisma.moodle.deleteMany({ where: { serviceId: id } });
	await prisma.appwrite.deleteMany({ where: { serviceId: id } });
	await prisma.searxng.deleteMany({ where: { serviceId: id } });
	await prisma.weblate.deleteMany({ where: { serviceId: id } });
	await prisma.taiga.deleteMany({ where: { serviceId: id } });

	await prisma.service.delete({ where: { id } });
}
export async function verifyAndDecryptServiceSecrets(id: string) {
	const secrets = await prisma.serviceSecret.findMany({ where: { serviceId: id } })
	let decryptedSecrets = secrets.map(secret => {
		const { name, value } = secret
		if (value) {
			let rawValue = decrypt(value)
			rawValue = rawValue.replaceAll(/\$/gi, '$$$')
			return { name, value: rawValue }
		}
		return { name, value }

	})
	return decryptedSecrets
}
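One non-obvious line above is rawValue.replaceAll(/\$/gi, '$$$'). In a JavaScript replacement string, $$ stands for one literal $, so the pattern '$$$' emits two dollar signs; each $ in a decrypted secret therefore becomes $$, which docker-compose later reads as an escaped literal $ instead of the start of variable interpolation. A tiny check (values are illustrative):

	// Each '$' doubles so docker-compose treats it literally rather than interpolating.
	const escaped = 'pa$$word'.replaceAll(/\$/g, '$$$');
	console.log(escaped); // 'pa$$$$word' — compose renders this back as 'pa$$word'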
261  apps/api/src/lib/services/handlers.ts  Normal file
@@ -0,0 +1,261 @@
import type { FastifyReply, FastifyRequest } from 'fastify';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import path from 'path';
import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeCommand, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers';

import { ServiceStartStop } from '../../routes/api/v1/services/types';
import { OnlyId } from '../../types';
import { verifyAndDecryptServiceSecrets } from './common';

export async function stopService(request: FastifyRequest<ServiceStartStop>) {
	try {
		const { id } = request.params;
		const teamId = request.user.teamId;
		const { destinationDockerId } = await getServiceFromDB({ id, teamId });
		if (destinationDockerId) {
			const { stdout: containers } = await executeCommand({
				dockerId: destinationDockerId,
				command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}`
			})
			if (containers) {
				const containerArray = containers.split('\n');
				if (containerArray.length > 0) {
					for (const container of containerArray) {
						await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
						await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
					}
				}
			}
			return {}
		}
		throw { status: 500, message: 'Could not stop containers.' }
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
export async function startService(request: FastifyRequest<ServiceStartStop>, fastify: any) {
	try {
		const { id } = request.params;
		const teamId = request.user.teamId;
		const service = await getServiceFromDB({ id, teamId });
		const arm = isARM(service.arch);
		const { type, destinationDockerId, destinationDocker, persistentStorage, exposePort } =
			service;

		const { workdir } = await createDirectories({ repository: type, buildId: id });
		const template: any = await parseAndFindServiceTemplates(service, workdir, true)
		const network = destinationDockerId && destinationDocker.network;
		const config = {};
		for (const s in template.services) {
			let newEnvironments = []
			if (arm) {
				if (template.services[s]?.environmentArm?.length > 0) {
					for (const environment of template.services[s].environmentArm) {
						let [env, ...value] = environment.split("=");
						value = value.join("=")
						if (!value.startsWith('$$secret') && value !== '') {
							newEnvironments.push(`${env}=${value}`)
						}
					}
				}
			} else {
				if (template.services[s]?.environment?.length > 0) {
					for (const environment of template.services[s].environment) {
						let [env, ...value] = environment.split("=");
						value = value.join("=")
						if (!value.startsWith('$$secret') && value !== '') {
							newEnvironments.push(`${env}=${value}`)
						}
					}
				}
			}
			const secrets = await verifyAndDecryptServiceSecrets(id)
			for (const secret of secrets) {
				const { name, value } = secret
				if (value) {
					const foundEnv = !!template.services[s].environment?.find(env => env.startsWith(`${name}=`))
					const foundNewEnv = !!newEnvironments?.find(env => env.startsWith(`${name}=`))
					if (foundEnv && !foundNewEnv) {
						newEnvironments.push(`${name}=${value}`)
					}
					if (!foundEnv && !foundNewEnv && s === id) {
						newEnvironments.push(`${name}=${value}`)
					}
				}
			}
			const customVolumes = await prisma.servicePersistentStorage.findMany({ where: { serviceId: id } })
			let volumes = new Set()
			if (arm) {
				template.services[s]?.volumesArm && template.services[s].volumesArm.length > 0 && template.services[s].volumesArm.forEach(v => volumes.add(v))
			} else {
				template.services[s]?.volumes && template.services[s].volumes.length > 0 && template.services[s].volumes.forEach(v => volumes.add(v))
			}

			// Workaround: old plausible analytics service wrong volume id name
			if (service.type === 'plausibleanalytics' && service.plausibleAnalytics?.id) {
				let temp = Array.from(volumes)
				temp.forEach(a => {
					const t = a.replace(service.id, service.plausibleAnalytics.id)
					volumes.delete(a)
					volumes.add(t)
				})
			}

			if (customVolumes.length > 0) {
				for (const customVolume of customVolumes) {
					const { volumeName, path, containerId } = customVolume
					if (volumes && volumes.size > 0 && !volumes.has(`${volumeName}:${path}`) && containerId === service) {
						volumes.add(`${volumeName}:${path}`)
					}
				}
			}
			let ports = []
			if (template.services[s].proxy?.length > 0) {
				for (const proxy of template.services[s].proxy) {
					if (proxy.hostPort) {
						ports.push(`${proxy.hostPort}:${proxy.port}`)
					}
				}
			} else {
				if (template.services[s].ports?.length === 1) {
					for (const port of template.services[s].ports) {
						if (exposePort) {
							ports.push(`${exposePort}:${port}`)
						}
					}
				}
			}
			let image = template.services[s].image
			if (arm && template.services[s].imageArm) {
				image = template.services[s].imageArm
			}
			config[s] = {
				container_name: s,
				build: template.services[s].build || undefined,
				command: template.services[s].command,
				entrypoint: template.services[s]?.entrypoint,
				image,
				expose: template.services[s].ports,
				ports: ports.length > 0 ? ports : undefined,
				volumes: Array.from(volumes),
				environment: newEnvironments,
				depends_on: template.services[s]?.depends_on,
				ulimits: template.services[s]?.ulimits,
				cap_drop: template.services[s]?.cap_drop,
				cap_add: template.services[s]?.cap_add,
				labels: makeLabelForServices(type),
				...defaultComposeConfiguration(network),
			}
			// Generate files for builds
			if (template.services[s]?.files?.length > 0) {
				if (!config[s].build) {
					config[s].build = {
						context: workdir,
						dockerfile: `Dockerfile.${s}`
					}
				}
				let Dockerfile = `
FROM ${template.services[s].image}`
				for (const file of template.services[s].files) {
					const { location, content } = file;
					const source = path.join(workdir, location);
					await fs.mkdir(path.dirname(source), { recursive: true });
					await fs.writeFile(source, content);
					Dockerfile += `
COPY .${location} ${location}`
				}
				await fs.writeFile(`${workdir}/Dockerfile.${s}`, Dockerfile);
			}
		}
		const { volumeMounts } = persistentVolumes(id, persistentStorage, config)
		const composeFile: ComposeFile = {
			version: '3.8',
			services: config,
			networks: {
				[network]: {
					external: true
				}
			},
			volumes: volumeMounts
		}
		const composeFileDestination = `${workdir}/docker-compose.yaml`;
		await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
		await startServiceContainers(fastify, id, teamId, destinationDocker.id, composeFileDestination)

		// Workaround: Stop old minio proxies
		if (service.type === 'minio') {
			try {
				const { stdout: containers } = await executeCommand({
					dockerId: destinationDocker.id,
					command:
						`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
				});
				if (containers) {
					const containerArray = containers.split('\n');
					if (containerArray.length > 0) {
						for (const container of containerArray) {
							await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
							await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
						}
					}
				}
			} catch (error) { }
			try {
				const { stdout: containers } = await executeCommand({
					dockerId: destinationDocker.id,
					command:
						`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
				});
				if (containers) {
					const containerArray = containers.split('\n');
					if (containerArray.length > 0) {
						for (const container of containerArray) {
							await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
							await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
						}
					}
				}
			} catch (error) { }
		}
		return {}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) {
	try {
		fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Pulling images...' })
		await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
	} catch (error) { }
	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Building images...' })
	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Creating containers...' })
	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Starting containers...' })
	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
	await asyncSleep(1000);
	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 0 })
}
export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
	try {
		const { id } = request.params
		const teamId = request.user.teamId;
		const {
			destinationDockerId,
			destinationDocker,
		} = await getServiceFromDB({ id, teamId });
		if (destinationDockerId) {
			await executeCommand({
				dockerId: destinationDocker.id,
				command: `docker exec ${id} migrate`
			})
			return await reply.code(201).send()
		}
		throw { status: 500, message: 'Could cleanup logs.' }
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
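One detail in startService worth spelling out: template environment entries are split on '=' and then re-joined, because the values themselves may contain '=' characters; entries whose value still starts with '$$secret' are placeholders and are skipped, with the real values arriving later from verifyAndDecryptServiceSecrets. A tiny illustration of the split (the value is made up):

	const environment = 'DATABASE_URL=postgres://user:pass@db:5432/app?sslmode=require';
	const [env, ...rest] = environment.split('=');
	const value = rest.join('=');
	// env   -> 'DATABASE_URL'
	// value -> 'postgres://user:pass@db:5432/app?sslmode=require'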
@@ -326,7 +326,7 @@ export const fider = [{
	isBoolean: false,
	isEncrypted: true
}, {
	name: 'postgreslUser',
	name: 'postgresqlUser',
	isEditable: false,
	isLowerCase: false,
	isNumber: false,
@@ -476,4 +476,392 @@ export const moodle = [{
	isNumber: false,
	isBoolean: false,
	isEncrypted: false
}]

export const appwrite = [
	{ name: 'opensslKeyV1', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'executorSecret', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'redisPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mariadbHost', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mariadbPort', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mariadbUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mariadbPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mariadbRootUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mariadbRootUserPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'mariadbDatabase', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]

export const glitchTip = [
	{ name: 'postgresqlUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlDatabase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPublicPort', isEditable: false, isLowerCase: false, isNumber: true, isBoolean: false, isEncrypted: false },
	{ name: 'secretKeyBase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'emailSmtpHost', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailSmtpPassword', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'emailSmtpUseSsl', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: true, isEncrypted: false },
	{ name: 'emailSmtpUseSsl', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: true, isEncrypted: false },
	{ name: 'emailSmtpPort', isEditable: true, isLowerCase: false, isNumber: true, isBoolean: false, isEncrypted: false },
	{ name: 'emailSmtpUser', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'defaultEmail', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'defaultUsername', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'defaultPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'defaultEmailFrom', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailUrl', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'emailBackend', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'mailgunApiKey', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'sendgridApiKey', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'enableOpenUserRegistration', isEditable: true, isLowerCase: false, isNumber: false, isBoolean: true, isEncrypted: false }
]

export const searxng = [
	{ name: 'secretKey', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'redisPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true }
]

export const weblate = [
	{ name: 'adminPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlHost', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPort', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlDatabase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]
export const taiga = [
	{ name: 'secretKey', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'djangoAdminUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'djangoAdminPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'rabbitMQUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'rabbitMQPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlHost', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPort', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlUser', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false },
	{ name: 'postgresqlPassword', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: true },
	{ name: 'postgresqlDatabase', isEditable: false, isLowerCase: false, isNumber: false, isBoolean: false, isEncrypted: false }
]
@@ -21,7 +21,6 @@ export default fp<FastifyJWTOptions>(async (fastify, opts) => {
	try {
		await request.jwtVerify()
	} catch (err) {
		console.log(err)
		reply.send(err)
	}
})
29  apps/api/src/realtime/index.ts  Normal file
@@ -0,0 +1,29 @@

export default async (fastify) => {
	fastify.io.use((socket, next) => {
		const { token } = socket.handshake.auth;
		if (token && fastify.jwt.verify(token)) {
			next();
		} else {
			return next(new Error("unauthorized event"));
		}
	});
	fastify.io.on('connection', (socket: any) => {
		const { token } = socket.handshake.auth;
		const { teamId } = fastify.jwt.decode(token);
		socket.join(teamId);
		// console.info('Socket connected!', socket.id)
		// console.info('Socket joined team!', teamId)
		// socket.on('message', (message) => {
		// 	console.log(message)
		// })
		// socket.on('error', (err) => {
		// 	console.log(err)
		// })
	})
	// fastify.io.on("error", (err) => {
	// 	if (err && err.message === "unauthorized event") {
	// 		fastify.io.disconnect();
	// 	}
	// });
}
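For context, the middleware above expects the JWT in the Socket.IO handshake's auth payload, and the `start-service` events emitted by startServiceContainers are addressed to the team room joined here. A client would connect roughly like this, using the standard socket.io-client API (the server URL and token source are illustrative assumptions, not code from this diff):

	import { io } from 'socket.io-client';

	// Sketch only: the token must verify against the same JWT secret the API uses.
	const socket = io('http://localhost:3000', {
		auth: { token: localStorage.getItem('token') }
	});
	socket.on('start-service', ({ serviceId, state }) => {
		// state is a progress string ('Pulling images...') or 0 when finished
		console.log(serviceId, state);
	});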
File diff suppressed because it is too large
@@ -1,8 +1,8 @@
import { FastifyPluginAsync } from 'fastify';
import { OnlyId } from '../../../../types';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getDockerImages, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRegistry, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';

import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, GetImages, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartApplication, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';

const root: FastifyPluginAsync = async (fastify): Promise<void> => {
	fastify.addHook('onRequest', async (request) => {
@@ -11,6 +11,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
	fastify.get('/', async (request) => await listApplications(request));
	fastify.post<GetImages>('/images', async (request) => await getImages(request));

	fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredApplications(request));

	fastify.post('/new', async (request, reply) => await newApplication(request, reply));

	fastify.get<OnlyId>('/:id', async (request) => await getApplication(request));
@@ -19,6 +21,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {

	fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request));

	fastify.post<RestartApplication>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
	fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
	fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply));

@@ -29,6 +32,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {

	fastify.get<OnlyId>('/:id/secrets', async (request) => await getSecrets(request));
	fastify.post<SaveSecret>('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
	fastify.put<SaveSecret>('/:id/secrets', async (request, reply) => await updateSecret(request, reply));
	fastify.put<SaveSecret>('/:id/secrets/preview', async (request, reply) => await updatePreviewSecret(request, reply));
	fastify.delete<DeleteSecret>('/:id/secrets', async (request) => await deleteSecret(request));

	fastify.get<OnlyId>('/:id/storages', async (request) => await getStorages(request));
@@ -36,12 +41,18 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
	fastify.delete<DeleteStorage>('/:id/storages', async (request) => await deleteStorage(request));

	fastify.get<OnlyId>('/:id/previews', async (request) => await getPreviews(request));
	fastify.post<OnlyId>('/:id/previews/load', async (request) => await loadPreviews(request));
	fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
	fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));

	fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
	fastify.get<GetBuildLogs>('/:id/logs/build', async (request) => await getBuildLogs(request));
	fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request));
	fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
	fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));

	fastify.get('/:id/usage', async (request) => await getUsage(request))
	fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request))

	fastify.get('/:id/images', async (request) => await getDockerImages(request))

	fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
	fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));
@@ -54,6 +65,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
	fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
	fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));

	fastify.post('/:id/configuration/registry', async (request, reply) => await saveRegistry(request, reply));

	fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply));

	fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
	fastify.post<OnlyId>('/:id/configuration/sshkey', async (request, reply) => await saveGitLabSSHKey(request, reply));
Some files were not shown because too many files have changed in this diff.