Compare commits

..

243 Commits

Author SHA1 Message Date
Andras Bacsai
78f0e6ff6b Update package.json 2022-12-27 14:22:49 +01:00
Andras Bacsai
3af97af634 Update common.ts 2022-12-27 14:22:29 +01:00
Andras Bacsai
2c2663c8a4 Update common.ts 2022-12-27 14:20:19 +01:00
Andras Bacsai
1122b8a2f7 Update index.ts 2022-12-27 13:48:19 +01:00
Andras Bacsai
5b9f38948b Update index.ts 2022-12-27 13:46:33 +01:00
Andras Bacsai
507eb3b424 Update common.ts 2022-12-27 13:40:00 +01:00
Andras Bacsai
56fbc0ed6c Update package.json 2022-12-27 13:39:38 +01:00
Andras Bacsai
7aaad314e3 Update common.ts 2022-12-27 13:39:03 +01:00
Andras Bacsai
356949dd54 Merge pull request #811 from coollabsio/next
v3.12.5
2022-12-26 21:51:09 +01:00
Andras Bacsai
9878baca53 Update index.ts 2022-12-26 21:29:54 +01:00
Andras Bacsai
9cbc7c2939 Merge pull request #809 from Tiagofv/main
Fix bug: value.environment is not iterable
2022-12-26 21:19:11 +01:00
Andras Bacsai
4680b63911 fix: cleanupstorage 2022-12-26 21:17:53 +01:00
Tiago Braga
ce4a2d95f2 fix: remove unused imports 2022-12-24 16:28:02 -03:00
Tiago Braga
b2e048de8d Fix: conditional on environment 2022-12-24 16:27:10 -03:00
Andras Bacsai
d25a9d7515 devtemplates update 2022-12-22 11:31:46 +01:00
Andras Bacsai
dc130d3705 new pocketbase version 2022-12-22 11:22:37 +01:00
Andras Bacsai
c8f7ca920e wip: trpc 2022-12-21 15:06:33 +01:00
Andras Bacsai
e3e39af6fb remove console.log 2022-12-21 14:11:07 +01:00
Andras Bacsai
f38114f5a5 Merge pull request #802 from coollabsio/next
v3.12.4
2022-12-21 13:25:46 +01:00
Andras Bacsai
1ee9d041df fix: duplicate env variables 2022-12-21 13:24:30 +01:00
Andras Bacsai
9c6f412f04 wip: trpc 2022-12-21 13:06:44 +01:00
Andras Bacsai
4fa0f2d04a fix: gh actions 2022-12-21 12:47:42 +01:00
Andras Bacsai
e566a66ea4 test 2022-12-21 12:33:27 +01:00
Andras Bacsai
58a42abc67 test 2022-12-21 11:40:47 +01:00
Andras Bacsai
5676bd9d0d test 2022-12-21 11:24:19 +01:00
Andras Bacsai
9691010e7b test 2022-12-21 11:11:55 +01:00
Andras Bacsai
d19be3ad52 Merge pull request #801 from coollabsio/next
v3.12.3
2022-12-21 10:53:09 +01:00
Andras Bacsai
ec3cbf788b fix: secrets 2022-12-21 10:40:27 +01:00
Andras Bacsai
1282fd0b76 fix: secrets 2022-12-21 10:11:03 +01:00
Andras Bacsai
93430e5607 fix: add default node_env variable 2022-12-19 23:07:01 +01:00
Andras Bacsai
14201f4052 fix: add default node_env variable 2022-12-19 22:15:00 +01:00
Andras Bacsai
47979bf16d fix: secrets 2022-12-19 22:11:21 +01:00
Andras Bacsai
29530f3b17 fix: secrets with newline 2022-12-19 21:48:31 +01:00
Andras Bacsai
cb1d86d08b Merge pull request #796 from coollabsio/next
v3.12.2
2022-12-19 11:59:03 +01:00
Andras Bacsai
88f3f628ef fix: docker buildpack env 2022-12-19 11:51:44 +01:00
Andras Bacsai
295bea37bc fix: envs 2022-12-19 11:01:29 +01:00
Andras Bacsai
bd7d756254 fix: escape env vars 2022-12-19 10:22:11 +01:00
Andras Bacsai
4261147fe8 fix: escape secrets 2022-12-19 10:04:28 +01:00
Andras Bacsai
a70adc5eb3 fix: root user for dbs on arm 2022-12-19 09:52:50 +01:00
Andras Bacsai
06d40b8a81 debug secret problem 2022-12-14 12:34:13 +01:00
Andras Bacsai
2358510cba Update --bug-report.yaml 2022-12-13 21:30:52 +01:00
Andras Bacsai
e6d13cb7d7 Update --bug-report.yaml 2022-12-13 21:29:42 +01:00
Andras Bacsai
39e21c3f36 chore: version++ 2022-12-13 13:32:50 +01:00
Andras Bacsai
8da900ee72 fix: do not replace secret 2022-12-13 13:32:11 +01:00
Andras Bacsai
9f4e81a1a3 wip: trpc 2022-12-13 13:22:45 +01:00
Andras Bacsai
0b918c2f51 wip: trpc 2022-12-13 13:17:32 +01:00
Andras Bacsai
085cd2a314 wip: trpc 2022-12-13 13:15:23 +01:00
Andras Bacsai
98d2399568 wip: trpc 2022-12-13 13:13:28 +01:00
Andras Bacsai
515d9a0008 wip: trpc 2022-12-13 13:11:49 +01:00
Andras Bacsai
aece1fa7d3 wip: trpc 2022-12-13 13:04:47 +01:00
Andras Bacsai
abc614ecfd wip: trpc 2022-12-13 12:54:57 +01:00
Andras Bacsai
1180d3fdde wip: trpc 2022-12-13 12:47:14 +01:00
Andras Bacsai
1639d1725a Merge branch 'main' into next 2022-12-13 09:35:33 +01:00
Andras Bacsai
5df1deecbc update templates and tags 2022-12-13 09:34:42 +01:00
Andras Bacsai
fe3c0cf76e fix: appwrite tmp volume 2022-12-13 09:17:33 +01:00
Andras Bacsai
cc0df0182c Merge pull request #785 from jugglingjsons/main
fix: adding missing appwrite volume
2022-12-13 09:15:11 +01:00
Andras Bacsai
eb354f639f chore: version++ 2022-12-13 09:05:11 +01:00
jugglingjsons
02c530dcbe fix: adding missing appwrite volume 2022-12-12 19:11:40 +01:00
Andras Bacsai
4ad7e1f8e6 wip 2022-12-12 16:04:41 +01:00
Andras Bacsai
2007ba0c3b fix: build commands 2022-12-12 14:52:56 +01:00
Andras Bacsai
2009dc11db wip trpc 2022-12-12 14:48:56 +01:00
Andras Bacsai
62f2196a0c Merge branch 'next' into trpc 2022-12-12 09:38:38 +01:00
Andras Bacsai
e63c65da4f Merge pull request #775 from hyddeos/main
Add link to the documentation
2022-12-12 09:11:18 +01:00
Andras Bacsai
570a082227 Merge pull request #776 from rickaard/close-sidedrawer
Close the sidedrawer when clicking a link in mobile view
2022-12-12 09:10:31 +01:00
Andras Bacsai
9b1ede3a59 fix: migration file 2022-12-12 09:08:00 +01:00
Andras Bacsai
c445fc0f8a wip 2022-12-12 08:44:23 +01:00
Rickard Jonsson
699493cf24 Make sure sidedrawer is closed on link click 2022-12-11 20:59:02 +01:00
hyddeos
6c89686f31 Add link to the documentation 2022-12-11 20:32:15 +01:00
Andras Bacsai
f55b861849 fix: cleanup 2022-12-09 14:32:22 +01:00
Andras Bacsai
adf82c04ad Merge pull request #747 from coollabsio/next
v3.12.0
2022-12-09 14:29:33 +01:00
Andras Bacsai
1b80956fe8 fix: public db icon on dashboard 2022-12-09 14:08:21 +01:00
Andras Bacsai
de9da8caf9 fix 2022-12-09 13:59:43 +01:00
Andras Bacsai
967f42dd89 add shell to some cmds 2022-12-09 13:46:06 +01:00
Andras Bacsai
95e8b29fa2 fix: wrong port in case of docker compose 2022-12-09 11:21:25 +01:00
Andras Bacsai
2e3c815e53 fix: delete resource on dashboard 2022-12-07 15:27:26 +01:00
Andras Bacsai
132707caa7 fix: rde 2022-12-07 14:46:12 +01:00
Andras Bacsai
0dad616c38 fixes 2022-12-07 13:45:56 +01:00
Andras Bacsai
c0882dffde Merge pull request #766 from twisttaan/feat/name-label
feat(api): name label
2022-12-07 13:45:11 +01:00
Andras Bacsai
5e082c647c fixes 2022-12-07 12:17:06 +01:00
Tristan Camejo
285c3c2f5d feat(api): name label 2022-12-07 00:38:08 +00:00
Andras Bacsai
dcb29a80fe fix 2022-12-06 10:29:14 +01:00
Andras Bacsai
b45ad19732 fix: security hole 2022-12-06 10:27:51 +01:00
Andras Bacsai
f12d453b5f backups... backups everywhere 2022-12-02 14:34:06 +01:00
Andras Bacsai
8a00b711be add pocketbase 2022-12-02 10:00:27 +01:00
Andras Bacsai
56204efc7a update workflow 2022-12-02 09:44:29 +01:00
Andras Bacsai
da638c270f infra: pocketbase release 2022-12-02 09:41:22 +01:00
Andras Bacsai
ad4b974274 fix: turn off autodeploy for simpledockerfiles 2022-12-01 16:50:54 +01:00
Andras Bacsai
943a05edcc fixes 2022-12-01 16:29:38 +01:00
Andras Bacsai
1a28e65e50 feat: revert to remote image 2022-12-01 15:51:18 +01:00
Andras Bacsai
cd3af7fa39 fix: failed builds should not push images 2022-12-01 15:05:21 +01:00
Andras Bacsai
8ccb0c88db feat: able to push image to docker registry 2022-12-01 14:39:02 +01:00
Andras Bacsai
127880cf8d schema prettify 2022-12-01 13:29:45 +01:00
Andras Bacsai
2e56086661 feat: simpleDockerfile deployment 2022-12-01 12:58:45 +01:00
Andras Bacsai
a129be0dbd fixes 2022-12-01 10:23:43 +01:00
Andras Bacsai
12c0760cb3 fixes 2022-12-01 09:51:56 +01:00
Andras Bacsai
9d3ed85ffd haha 2022-11-30 15:50:45 +01:00
Andras Bacsai
850d57d0d2 fix text haha 2022-11-30 15:49:39 +01:00
Andras Bacsai
7981bec1ed text changes 2022-11-30 15:47:54 +01:00
Andras Bacsai
76373a8597 feat: save application data before deploying 2022-11-30 15:40:27 +01:00
Andras Bacsai
9913e7b70b feat: specific git commit deployment
feat: revert to specific image
fix: no system wide docker registries
2022-11-30 15:22:07 +01:00
Andras Bacsai
a08bb25bfa fix: static for arm 2022-11-30 11:45:39 +01:00
Andras Bacsai
28ec164bc2 fix: update PR/MRs with new previewSeparator 2022-11-30 11:36:05 +01:00
Andras Bacsai
3d5ea8629c fix: apache on arm 2022-11-30 11:18:19 +01:00
Andras Bacsai
4aaf59d034 update templates and tags 2022-11-30 11:07:44 +01:00
Andras Bacsai
14850476c7 feat: able to host static/php sites on arm 2022-11-30 11:00:03 +01:00
Andras Bacsai
bf5b6170fa remove console log 2022-11-29 15:47:25 +01:00
Andras Bacsai
6f91591448 fix: webhook previewseparator 2022-11-29 15:45:18 +01:00
Andras Bacsai
3c723bcba2 fix: remove sentry before migration 2022-11-29 15:13:05 +01:00
Andras Bacsai
e7dd13cffa fix: git checkout 2022-11-29 15:10:34 +01:00
Andras Bacsai
ad91630faa fix: remove beta from systemwide git 2022-11-29 15:05:31 +01:00
Andras Bacsai
57f746b584 fix: login error 2022-11-29 14:55:40 +01:00
Andras Bacsai
a55720091c fix: prevent webhook errors to be logged 2022-11-29 14:50:24 +01:00
Andras Bacsai
b461635834 debug 2022-11-29 14:44:53 +01:00
Andras Bacsai
1375580651 fix: migrations 2022-11-29 14:01:19 +01:00
Andras Bacsai
3d20433ad1 feat: sentry frontend 2022-11-29 13:59:03 +01:00
Andras Bacsai
58447c6456 update migration 2022-11-29 13:39:00 +01:00
Andras Bacsai
c6273e9177 feat: custom previewseparator 2022-11-29 13:29:11 +01:00
Andras Bacsai
ffdc158d44 fix: only visible with publicrepo 2022-11-29 13:13:04 +01:00
Andras Bacsai
876c81fad8 fix: ui 2022-11-29 13:00:44 +01:00
Andras Bacsai
028ee6d7b1 feat: deploy specific commit for apps
feat: keep number of images locally to revert quickly
2022-11-29 11:47:20 +01:00
Andras Bacsai
ec00548f1b feat: system wide git out of beta 2022-11-29 10:53:05 +01:00
Andras Bacsai
c4dc03e4a8 Merge pull request #700 from ThallesP/main
feature: initial support for specific git commit
2022-11-29 10:52:21 +01:00
Andras Bacsai
3a510a77ec Merge branch 'next' into main 2022-11-29 10:50:00 +01:00
Andras Bacsai
98a785fced tags 2022-11-29 10:36:19 +01:00
Andras Bacsai
c48654160d fixes 2022-11-29 10:35:56 +01:00
Andras Bacsai
55b80132c4 fixes 2022-11-29 09:43:28 +01:00
Andras Bacsai
1f0c168936 fixes 2022-11-29 09:42:36 +01:00
Andras Bacsai
6715bc750f Merge pull request #721 from gabrielengel/g-i18n
Starting translations work
2022-11-29 09:24:52 +01:00
Andras Bacsai
04a48a626b Merge pull request #746 from gabrielengel/refactor-servers
Componentization of /servers and /sources (depends on badges merge)
2022-11-29 09:22:08 +01:00
Andras Bacsai
2f9f0da7c6 Merge pull request #745 from gabrielengel/new-badges
New Badges components: destination, public, status, teams
2022-11-29 09:21:30 +01:00
Andras Bacsai
513c4f9e29 fixes 2022-11-29 09:19:10 +01:00
Andras Bacsai
3f078517a0 fix: dnt 2022-11-28 14:29:14 +01:00
Andras Bacsai
37036f0fca fix: sentry dsn update 2022-11-28 13:57:18 +01:00
Andras Bacsai
5789aadb5c feat: do not track in settings 2022-11-28 13:55:49 +01:00
Andras Bacsai
a768ed718a update sentry 2022-11-28 12:56:43 +01:00
Andras Bacsai
9c6092f31f fix: seed 2022-11-28 12:53:44 +01:00
Andras Bacsai
40d294a247 feat: add default sentry 2022-11-28 12:02:10 +01:00
Andras Bacsai
72844e4edc feat: save doNotTrackData to db 2022-11-28 11:48:38 +01:00
Andras Bacsai
db0a71125a version++ 2022-11-28 11:28:54 +01:00
Andras Bacsai
da244af39d fixes 2022-11-28 11:27:03 +01:00
Andras Bacsai
067f502d3c feat: custom docker compose file location in repo 2022-11-28 10:21:11 +01:00
Andras Bacsai
fffc6b1e4e feat: docker registries working 2022-11-25 15:44:11 +01:00
Andras Bacsai
9121c6a078 fix: 0 destinations redirect after creation 2022-11-25 15:43:59 +01:00
Andras Bacsai
9c4e581d8b feat: use registry for building 2022-11-25 14:29:01 +01:00
Andras Bacsai
dfadd31f46 Merge pull request #748 from zarxor/main
Typing error in CONTRIBUTION.md
2022-11-25 13:08:16 +01:00
Johan Boström
0cfa6fff43 Typing error in CONTRIBUTION.md 2022-11-23 21:00:01 +01:00
Andras Bacsai
d61671c1a0 wip 2022-11-23 15:44:30 +01:00
Andras Bacsai
d4f10a9af3 feat: custom/private docker registries 2022-11-23 14:39:30 +01:00
Andras Bacsai
03861af893 fix: nope in database strings 2022-11-23 13:40:10 +01:00
Andras Bacsai
ae531c445d fix: remove hardcoded sentry dsn 2022-11-23 13:39:16 +01:00
Andras Bacsai
4b26aeef9a fix: remote haproxy password/etc 2022-11-23 13:39:16 +01:00
Andras Bacsai
1e47b79b50 chore: version++ 2022-11-23 13:39:16 +01:00
Andras Bacsai
0c223dcec4 Merge pull request #698 from themarkwill/fix/errorInBaseApi
fix: Accept logged and not logged user in /base
2022-11-23 13:36:39 +01:00
Andras Bacsai
0f4536c3d3 Merge pull request #744 from coollabsio/next
v3.11.13
2022-11-23 13:08:13 +01:00
Andras Bacsai
f43c584463 prettify 2022-11-23 13:07:45 +01:00
Gabriel Engel
91c558ec83 Componentization of /servers and /sources 2022-11-23 08:17:03 -03:00
Gabriel Engel
9d45ab3246 New Badges components: destination, public, status, teams + container/status 2022-11-23 07:52:59 -03:00
Andras Bacsai
34ff6eb567 fix: load logs after build failed 2022-11-23 11:51:19 +01:00
Andras Bacsai
8793c00438 fix: mounts 2022-11-23 11:48:31 +01:00
Andras Bacsai
d7981d5c3e fix: logs 2022-11-23 11:48:04 +01:00
Andras Bacsai
bcaae3b67b debug off
fix: logging
2022-11-23 11:37:52 +01:00
Andras Bacsai
046d9f9597 debug 2022-11-23 11:24:15 +01:00
Andras Bacsai
81bd0301d2 fix: hasura admin secret 2022-11-23 11:18:25 +01:00
Andras Bacsai
530e7e494f fix: storage for compose bp + debug on 2022-11-23 10:57:52 +01:00
Andras Bacsai
d402fd5690 fix: move debug log settings to build logs 2022-11-23 10:28:36 +01:00
Andras Bacsai
eebec3b92f fix: escape % in secrets 2022-11-23 10:17:09 +01:00
Andras Bacsai
211c6585fa chore: version++ 2022-11-22 13:17:25 +01:00
Andras Bacsai
e1b5c40ca0 update templates 2022-11-22 13:17:09 +01:00
Andras Bacsai
747a9b521b fix: wrong icons on dashboard 2022-11-22 13:16:47 +01:00
Andras Bacsai
c2d72ad309 Merge pull request #742 from coollabsio/next
v3.11.12
2022-11-22 11:20:40 +01:00
Andras Bacsai
596181b622 update packages 2022-11-22 10:55:52 +01:00
Andras Bacsai
77c5270e1e chore: version++ 2022-11-22 10:47:21 +01:00
Andras Bacsai
a663c14df8 fix: exposed ports 2022-11-22 10:47:02 +01:00
Andras Bacsai
3bd9f00268 Merge pull request #741 from coollabsio/next
v3.11.11
2022-11-21 22:03:07 +01:00
Andras Bacsai
1aadda735d fix: webhook traefik 2022-11-21 21:58:07 +01:00
Andras Bacsai
12035208e2 fix: replace $$generate vars 2022-11-21 21:54:21 +01:00
Andras Bacsai
df8a9f673c fix: gh actions 2022-11-18 14:49:20 +01:00
Andras Bacsai
aa5c8a2c56 fix: gh actions 2022-11-18 14:48:31 +01:00
Andras Bacsai
a84540e6bb fix: gitea icon is svg 2022-11-18 14:47:23 +01:00
Andras Bacsai
fb91b64063 Merge pull request #730 from quiint/patch-1
Create Gitea icon
2022-11-18 14:45:01 +01:00
Andras Bacsai
94cc77ebca feat: only show expose if no proxy conf defined in template 2022-11-18 14:33:58 +01:00
Andras Bacsai
aac6981304 fix: no variables in template
feat: hostPort proxy conf from template
2022-11-18 14:28:05 +01:00
Andras Bacsai
ca05828b68 ga fixes 2022-11-18 11:21:41 +01:00
Andras Bacsai
8ec6b4c59c ga fixes 2022-11-18 11:19:15 +01:00
Andras Bacsai
f1be5f5341 ga fixes 2022-11-18 11:17:04 +01:00
Andras Bacsai
714c264002 fluentbit github release 2022-11-18 11:07:52 +01:00
Andras Bacsai
eca58097ef Merge pull request #733 from coollabsio/next
v3.11.10
2022-11-16 14:24:54 +01:00
Andras Bacsai
281146e22b chore: version++ 2022-11-16 12:46:29 +00:00
Andras Bacsai
f3a19a5d02 fix: wrong template/type 2022-11-16 12:40:44 +00:00
Andras Bacsai
9b9b6937f4 fix: local dev api/ws urls 2022-11-16 12:40:28 +00:00
Andras Bacsai
f54c0b7dff fix: isBot issue 2022-11-15 19:13:46 +00:00
Quiint
36c58ad286 Create gitea.svg 2022-11-14 09:54:46 -05:00
Andras Bacsai
a67f633259 Merge pull request #726 from coollabsio/next
v3.11.8
2022-11-14 14:24:52 +01:00
Andras Bacsai
f39a607c1a fix: default icon for new services 2022-11-14 13:54:06 +01:00
Andras Bacsai
0cc67ed2e5 update embeded templates 2022-11-14 13:46:17 +01:00
Andras Bacsai
5f8402c645 Merge pull request #727 from ksmithdev/main
Create keycloak.png
2022-11-14 12:59:29 +01:00
Andras Bacsai
3ab87cd11e ui: reload compose loading 2022-11-14 11:53:53 +01:00
Andras Bacsai
d5620d305d fix: ports for services 2022-11-14 11:49:32 +01:00
Andras Bacsai
35ebc5e842 fix: empty secrets on UI 2022-11-14 11:37:36 +01:00
Andras Bacsai
66276be1d2 fix: volume names for undefined volume names in compose 2022-11-14 11:26:12 +01:00
Andras Bacsai
47c0d522db chore: version++ 2022-11-14 11:00:25 +01:00
Andras Bacsai
b654883d1a ui: fixes 2022-11-14 10:59:19 +01:00
Andras Bacsai
b4f9d29129 fix: application persistent storage things 2022-11-14 10:40:28 +01:00
Andras Bacsai
bec6b961f3 fix: docker compose persistent volumes 2022-11-14 09:11:02 +01:00
Kyle Smith
2ce8f34306 Create keycloak.png 2022-11-11 14:03:05 -05:00
Andras Bacsai
30d1ae59ec revert: revert: revert 2022-11-11 14:25:02 +01:00
Andras Bacsai
ac7d4e3645 fix: getTemplates 2022-11-11 14:19:42 +01:00
Andras Bacsai
868c4001f6 gh action: revert 2022-11-11 14:17:53 +01:00
Andras Bacsai
e99c44d967 gh actions: update prod release flow 2022-11-11 13:41:02 +01:00
Andras Bacsai
48a877f160 Merge pull request #725 from coollabsio/next
v3.11.7
2022-11-11 13:33:57 +01:00
Andras Bacsai
cea894a8bd fix: dashboard error 2022-11-11 13:28:37 +01:00
Andras Bacsai
087e7b9311 Merge pull request #724 from coollabsio/next
v3.11.6
2022-11-11 11:58:46 +01:00
Andras Bacsai
39ba498293 ui: fix 2022-11-11 10:39:01 +01:00
Andras Bacsai
fe7390bd4d fix: update on mobile 2022-11-11 10:38:30 +01:00
Andras Bacsai
75af551435 ui: secrets on apps 2022-11-11 09:33:45 +01:00
Andras Bacsai
ae2d3ebb48 fix: no tags error 2022-11-11 09:25:02 +01:00
Andras Bacsai
5ff6c53715 Merge pull request #723 from coollabsio/next
v3.11.5
2022-11-11 08:28:37 +01:00
Andras Bacsai
3c94723b23 fix: show rollback button loading 2022-11-10 15:43:28 +01:00
Andras Bacsai
c6a2e3e328 update tags 2022-11-10 15:34:33 +01:00
Andras Bacsai
2dc5e10878 update tags 2022-11-10 15:33:57 +01:00
Andras Bacsai
4086dfcf56 rename lavalink 2022-11-10 15:32:13 +01:00
Andras Bacsai
7937c2bab0 Merge pull request #717 from kaname-png/next
chore: add jda icon for lavalink service
2022-11-10 15:31:37 +01:00
Andras Bacsai
5ffa8e9936 update templates 2022-11-10 15:29:44 +01:00
Andras Bacsai
c431cee517 fix: wp + mysql on arm 2022-11-10 15:01:03 +01:00
Andras Bacsai
375f17e728 debug 2022-11-10 14:52:37 +01:00
Andras Bacsai
d3f658c874 Readme fix 2022-11-10 14:17:20 +01:00
Andras Bacsai
5e340a4cdd fix: expose ports for services 2022-11-10 14:13:58 +01:00
Andras Bacsai
409a5b9f99 fix: n8n and weblate icon 2022-11-10 14:08:02 +01:00
Andras Bacsai
fba305020b fix: for rollback 2022-11-10 14:00:01 +01:00
Andras Bacsai
bd4ce3ac45 feat: rollback coolify 2022-11-10 13:57:34 +01:00
Gabriel Engel
733de60f7c Starting translations work 2022-11-09 19:27:03 -03:00
Andras Bacsai
c365a44e01 Merge pull request #719 from coollabsio/next
v3.11.4
2022-11-09 14:20:23 +01:00
Andras Bacsai
e94f450bf0 fix: doc links 2022-11-09 13:50:29 +01:00
Andras Bacsai
d5efc9ddde chore: version++ 2022-11-09 13:50:20 +01:00
Andras Bacsai
68895ba4a5 fix: variable replacements 2022-11-09 13:50:11 +01:00
Andras Bacsai
139aa7a0fc Merge pull request #718 from coollabsio/next
v3.11.3
2022-11-09 13:05:58 +01:00
Andras Bacsai
4955157e13 fix: compose webhooks fixed 2022-11-09 13:02:42 +01:00
Kaname
f2dd5cc75e chore: add jda icon for lavalink service 2022-11-08 12:39:41 -06:00
Andras Bacsai
2ad634dbc6 refactor: code 2022-11-08 15:51:07 +01:00
Andras Bacsai
de13f65a24 fix: umami template 2022-11-08 15:23:18 +01:00
ThallesP
e038865693 feature: add default to latest commit and support for gitlab 2022-10-25 13:51:10 -03:00
ThallesP
dfd29dc37a feature: initial support for specific git commit 2022-10-25 13:26:03 -03:00
The Mark
4448b86b93 fix: Accept logged and not logged user in /base 2022-10-23 13:31:24 -04:00
413 changed files with 29505 additions and 4300 deletions

View File

@@ -8,7 +8,6 @@ package
.env.* .env.*
!.env.example !.env.example
dist dist
client
apps/api/db/*.db apps/api/db/*.db
local-serve local-serve
apps/api/db/migration.db-journal apps/api/db/migration.db-journal

View File

@@ -9,13 +9,21 @@ body:
- type: markdown - type: markdown
attributes: attributes:
value: | value: |
Thanks for taking the time to fill out this bug report! Please fill the form in English Thanks for taking the time to fill out this bug report! Please fill the form in English.
- type: checkboxes - type: checkboxes
attributes: attributes:
label: Is there an existing issue for this? label: Is there an existing issue for this?
options: options:
- label: I have searched the existing issues - label: I have searched the existing issues
required: true required: true
- type: input
id: repository
attributes:
label: Example public repository
description: "An example public git repository to reproduce the issue easily (if applicable)."
placeholder: "ex: https://github.com/coollabsio/coolify"
validations:
required: false
- type: textarea - type: textarea
attributes: attributes:
label: Description label: Description

View File

@@ -0,0 +1,93 @@
name: fluent-bit-release
on:
push:
paths:
- "others/fluentbit"
- ".github/workflows/fluent-bit-release.yml"
branches:
- next
jobs:
arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/fluentbit/
platforms: linux/arm64
push: true
tags: coollabsio/coolify-fluent-bit:1.0.0-arm64
amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: others/fluentbit/
platforms: linux/amd64
push: true
tags: coollabsio/coolify-fluent-bit:1.0.0-amd64
aarch64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/fluentbit/
platforms: linux/aarch64
push: true
tags: coollabsio/coolify-fluent-bit:1.0.0-aarch64
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify-fluent-bit:1.0.0 --amend coollabsio/coolify-fluent-bit:1.0.0-amd64 --amend coollabsio/coolify-fluent-bit:1.0.0-arm64 --amend coollabsio/coolify-fluent-bit:1.0.0-aarch64
docker manifest push coollabsio/coolify-fluent-bit:1.0.0

View File

@@ -0,0 +1,91 @@
name: pocketbase-release
on:
push:
paths:
- "others/pocketbase/*"
- ".github/workflows/pocketbase-release.yml"
jobs:
arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/pocketbase/
platforms: linux/arm64
push: true
tags: coollabsio/pocketbase:0.10.2-arm64
amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: others/pocketbase/
platforms: linux/amd64
push: true
tags: coollabsio/pocketbase:0.10.2-amd64
aarch64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/pocketbase/
platforms: linux/aarch64
push: true
tags: coollabsio/pocketbase:0.10.2-aarch64
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/pocketbase:0.10.2 --amend coollabsio/pocketbase:0.10.2-amd64 --amend coollabsio/pocketbase:0.10.2-arm64 --amend coollabsio/pocketbase:0.10.2-aarch64
docker manifest push coollabsio/pocketbase:0.10.2

View File

@@ -104,7 +104,9 @@ jobs:
- name: Create & publish manifest - name: Create & publish manifest
run: | run: |
docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
docker manifest create coollabsio/coolify:latest --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}} docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
docker manifest push coollabsio/coolify:latest
- uses: sarisia/actions-status-discord@v1 - uses: sarisia/actions-status-discord@v1
if: always() if: always()
with: with:

View File

@@ -2,6 +2,12 @@ name: staging-release
on: on:
push: push:
paths:
- "**"
- "!others/fluentbit"
- "!others/pocketbase"
- "!.github/workflows/fluent-bit-release.yml"
- "!.github/workflows/pocketbase-release.yml"
branches: branches:
- next - next

15
.gitignore vendored
View File

@@ -1,17 +1,24 @@
.DS_Store .DS_Store
node_modules node_modules
.pnpm-store .pnpm-store
build /apps/ui/build
/build
.svelte-kit .svelte-kit
package package
.env .env
.env.* .env.*
!.env.example !.env.example
dist dist
client
apps/api/db/*.db apps/api/db/*.db
local-serve
apps/api/db/migration.db-journal apps/api/db/migration.db-journal
apps/api/core* apps/api/core*
logs apps/backup/backups/*
!apps/backup/backups/.gitkeep
/logs
others/certificates others/certificates
backups/*
!backups/.gitkeep
# Trpc
apps/server/db/*.db
apps/server/db/*.db-journal

21
.vscode/settings.json vendored
View File

@@ -1,11 +1,22 @@
{ {
"i18n-ally.localesPaths": ["src/lib/locales"], "i18n-ally.localesPaths": [
"src/lib/locales"
],
"i18n-ally.keystyle": "nested", "i18n-ally.keystyle": "nested",
"i18n-ally.extract.ignoredByFiles": { "i18n-ally.extract.ignoredByFiles": {
"src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"] "src\\routes\\__layout.svelte": [
"Coolify",
"coolLabs logo"
]
}, },
"i18n-ally.sourceLanguage": "en", "i18n-ally.sourceLanguage": "en",
"i18n-ally.enabledFrameworks": ["svelte"], "i18n-ally.enabledFrameworks": [
"i18n-ally.enabledParsers": ["js", "ts", "json"], "svelte"
],
"i18n-ally.enabledParsers": [
"js",
"ts",
"json"
],
"i18n-ally.extract.autoDetect": true "i18n-ally.extract.autoDetect": true
} }

View File

@@ -34,7 +34,7 @@ You'll need a set of skills to [get started](docs/contribution/GettingStarted.md
```sh ```sh
# Or... Copy and paste commands bellow: # Or... Copy and paste commands bellow:
cp apps/api/.env.example apps/api.env cp apps/api/.env.example apps/api/.env
pnpm install pnpm install
pnpm db:push pnpm db:push
pnpm db:seed pnpm db:seed

View File

@@ -77,6 +77,7 @@ Deploy your resource to:
<a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a> <a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 
1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a>
### Services ### Services
- [Appwrite](https://appwrite.io) - [Appwrite](https://appwrite.io)
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress) - [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [Ghost](https://ghost.org) - [Ghost](https://ghost.org)
@@ -93,23 +94,39 @@ Deploy your resource to:
- [Fider](https://fider.io) - [Fider](https://fider.io)
- [Hasura](https://hasura.io) - [Hasura](https://hasura.io)
- [GlitchTip](https://glitchtip.com) - [GlitchTip](https://glitchtip.com)
- And more...
## Migration from v1
A fresh installation is necessary. v2 and v3 are not compatible with v1.
## Support ## Support
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai) - Mastodon: [@andrasbacsai@fosstodon.org](https://fosstodon.org/@andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai) - Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io) - Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://coollabs.io/discord) - Discord: [Invitation](https://coollabs.io/discord)
## Development Contributions ---
Coolify is developed under the Apache License and you can help to make it grow &rarr; [Start coding!](./CONTRIBUTION.md) ## ⚗️ Expertise Contributions
## Financial Contributors Coolify is developed under the [Apache License](./LICENSE) and you can help to make it grow.
Our community will be glad to have you on board!
Learn how to contribute to Coolify as as ...
&rarr; [👩🏾‍💻 Software developer](./CONTRIBUTION.md)
&rarr; [🧑🏻‍🏫 Translator](./docs/contribution/Translating.md)
<!--
&rarr; 🧑🏽‍🎨 Designer
&rarr; 🙋‍♀️ Community Managemer
&rarr; 🧙🏻‍♂️ Text Content Creator
&rarr; 👨🏼‍🎤 Video Content Creator
-->
---
## 💰 Financial Contributors
Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/coollabsio/contribute)] Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/coollabsio/contribute)]

View File

@@ -1,10 +1,9 @@
COOLIFY_APP_ID=local-dev COOLIFY_APP_ID=local-dev
# 32 bits long secret key # 32 bits long secret key
COOLIFY_SECRET_KEY=12341234123412341234123412341234 COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db COOLIFY_DATABASE_URL=file:../db/dev.db
COOLIFY_SENTRY_DSN=
COOLIFY_IS_ON=docker
COOLIFY_IS_ON=docker COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED=false COOLIFY_WHITE_LABELED_ICON=
COOLIFY_WHITE_LABELED_ICON= COOLIFY_AUTO_UPDATE=
COOLIFY_AUTO_UPDATE=

BIN
apps/api/db/dev.db.bak Normal file

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,11 @@
{ {
"watch": ["src"], "watch": [
"ignore": ["src/**/*.test.ts"], "src"
"ext": "ts,mjs,json,graphql", ],
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build", "ignore": [
"legacyWatch": true "src/**/*.test.ts"
} ],
"ext": "ts,mjs,json,graphql",
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --platform=node --outdir=build --format=cjs && node build",
"legacyWatch": true
}

View File

@@ -1,80 +1,85 @@
{ {
"name": "api", "name": "api",
"description": "Coolify's Fastify API", "description": "Coolify's Fastify API",
"license": "Apache-2.0", "license": "Apache-2.0",
"scripts": { "scripts": {
"db:generate": "prisma generate", "db:generate": "prisma generate",
"db:push": "prisma db push && prisma generate", "db:push": "prisma db push && prisma generate",
"db:seed": "prisma db seed", "db:seed": "prisma db seed",
"db:studio": "prisma studio", "db:studio": "prisma studio",
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name", "db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
"dev": "nodemon", "dev": "nodemon",
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --minify=true --platform=node --outdir=build --format=cjs", "build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
"format": "prettier --write 'src/**/*.{js,ts,json,md}'", "format": "prettier --write 'src/**/*.{js,ts,json,md}'",
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .", "lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
"start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js" "start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js"
}, },
"dependencies": { "dependencies": {
"@breejs/ts-worker": "2.0.0", "@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.4.1", "@fastify/autoload": "5.5.0",
"@fastify/cookie": "8.3.0", "@fastify/cookie": "8.3.0",
"@fastify/cors": "8.1.1", "@fastify/cors": "8.2.0",
"@fastify/env": "4.1.0", "@fastify/env": "4.1.0",
"@fastify/jwt": "6.3.2", "@fastify/jwt": "6.3.3",
"@fastify/multipart": "7.3.0", "@fastify/multipart": "7.3.0",
"@fastify/static": "6.5.0", "@fastify/static": "6.5.1",
"@iarna/toml": "2.2.5", "@iarna/toml": "2.2.5",
"@ladjs/graceful": "3.0.2", "@ladjs/graceful": "3.0.2",
"@prisma/client": "4.5.0", "@prisma/client": "4.6.1",
"bcryptjs": "2.4.3", "@sentry/node": "7.21.1",
"bree": "9.1.2", "@sentry/tracing": "7.21.1",
"cabin": "9.1.2", "axe": "11.0.0",
"compare-versions": "5.0.1", "bcryptjs": "2.4.3",
"csv-parse": "5.3.1", "bree": "9.1.2",
"csvtojson": "2.0.10", "cabin": "11.0.1",
"cuid": "2.1.8", "compare-versions": "5.0.1",
"dayjs": "1.11.6", "csv-parse": "5.3.2",
"dockerode": "3.3.4", "csvtojson": "2.0.10",
"dotenv-extended": "2.9.0", "cuid": "2.1.8",
"execa": "6.1.0", "dayjs": "1.11.6",
"fastify": "4.9.2", "dockerode": "3.3.4",
"fastify-plugin": "4.3.0", "dotenv-extended": "2.9.0",
"fastify-socket.io": "4.0.0", "execa": "6.1.0",
"generate-password": "1.7.0", "fastify": "4.10.2",
"got": "12.5.2", "fastify-plugin": "4.3.0",
"is-ip": "5.0.0", "fastify-socket.io": "4.0.0",
"is-port-reachable": "4.0.0", "generate-password": "1.7.0",
"js-yaml": "4.1.0", "got": "12.5.3",
"jsonwebtoken": "8.5.1", "is-ip": "5.0.0",
"node-forge": "1.3.1", "is-port-reachable": "4.0.0",
"node-os-utils": "1.3.7", "js-yaml": "4.1.0",
"p-all": "4.0.0", "jsonwebtoken": "8.5.1",
"p-throttle": "5.0.0", "minimist": "^1.2.7",
"prisma": "4.5.0", "node-forge": "1.3.1",
"public-ip": "6.0.1", "node-os-utils": "1.3.7",
"pump": "3.0.0", "p-all": "4.0.0",
"socket.io": "4.5.3", "p-throttle": "5.0.0",
"ssh-config": "4.1.6", "prisma": "4.6.1",
"strip-ansi": "7.0.1", "public-ip": "6.0.1",
"unique-names-generator": "4.7.1" "pump": "3.0.0",
}, "shell-quote": "^1.7.4",
"devDependencies": { "socket.io": "4.5.3",
"@types/node": "18.11.6", "ssh-config": "4.1.6",
"@types/node-os-utils": "1.3.0", "strip-ansi": "7.0.1",
"@typescript-eslint/eslint-plugin": "5.41.0", "unique-names-generator": "4.7.1"
"@typescript-eslint/parser": "5.41.0", },
"esbuild": "0.15.12", "devDependencies": {
"eslint": "8.26.0", "@types/node": "18.11.9",
"eslint-config-prettier": "8.5.0", "@types/node-os-utils": "1.3.0",
"eslint-plugin-prettier": "4.2.1", "@typescript-eslint/eslint-plugin": "5.44.0",
"nodemon": "2.0.20", "@typescript-eslint/parser": "5.44.0",
"prettier": "2.7.1", "esbuild": "0.15.15",
"rimraf": "3.0.2", "eslint": "8.28.0",
"tsconfig-paths": "4.1.0", "eslint-config-prettier": "8.5.0",
"types-fastify-socket.io": "0.0.1", "eslint-plugin-prettier": "4.2.1",
"typescript": "4.8.4" "nodemon": "2.0.20",
}, "prettier": "2.7.1",
"prisma": { "rimraf": "3.0.2",
"seed": "node prisma/seed.js" "tsconfig-paths": "4.1.0",
} "types-fastify-socket.io": "0.0.1",
} "typescript": "4.9.3"
},
"prisma": {
"seed": "node prisma/seed.js"
}
}

View File

@@ -0,0 +1,45 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"proxyPassword" TEXT NOT NULL,
"proxyUser" TEXT NOT NULL,
"proxyHash" TEXT,
"proxyDefaultRedirect" TEXT,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"DNSServers" TEXT,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false
);
INSERT INTO "new_Setting" ("DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_ApplicationPersistentStorage" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"path" TEXT NOT NULL,
"oldPath" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationPersistentStorage" ("applicationId", "createdAt", "id", "path", "updatedAt") SELECT "applicationId", "createdAt", "id", "path", "updatedAt" FROM "ApplicationPersistentStorage";
DROP TABLE "ApplicationPersistentStorage";
ALTER TABLE "new_ApplicationPersistentStorage" RENAME TO "ApplicationPersistentStorage";
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,37 @@
/*
Warnings:
- You are about to drop the column `proxyHash` on the `Setting` table. All the data in the column will be lost.
- You are about to drop the column `proxyPassword` on the `Setting` table. All the data in the column will be lost.
- You are about to drop the column `proxyUser` on the `Setting` table. All the data in the column will be lost.
*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,59 @@
-- CreateTable
CREATE TABLE "DockerRegistry" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"url" TEXT NOT NULL,
"username" TEXT,
"password" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"teamId" TEXT,
CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Application" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"fqdn" TEXT,
"repository" TEXT,
"configHash" TEXT,
"branch" TEXT,
"buildPack" TEXT,
"projectId" INTEGER,
"port" INTEGER,
"exposePort" INTEGER,
"installCommand" TEXT,
"buildCommand" TEXT,
"startCommand" TEXT,
"baseDirectory" TEXT,
"publishDirectory" TEXT,
"deploymentType" TEXT,
"phpModules" TEXT,
"pythonWSGI" TEXT,
"pythonModule" TEXT,
"pythonVariable" TEXT,
"dockerFileLocation" TEXT,
"denoMainFile" TEXT,
"denoOptions" TEXT,
"dockerComposeFile" TEXT,
"dockerComposeFileLocation" TEXT,
"dockerComposeConfiguration" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"destinationDockerId" TEXT,
"gitSourceId" TEXT,
"baseImage" TEXT,
"baseBuildImage" TEXT,
"dockerRegistryId" TEXT NOT NULL DEFAULT '0',
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,30 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,60 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", coalesce("isAPIDebuggingEnabled", false) AS "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_GlitchTip" (
"id" TEXT NOT NULL PRIMARY KEY,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"secretKeyBase" TEXT,
"defaultEmail" TEXT NOT NULL,
"defaultUsername" TEXT NOT NULL,
"defaultPassword" TEXT NOT NULL,
"defaultEmailFrom" TEXT NOT NULL DEFAULT 'glitchtip@domain.tdl',
"emailSmtpHost" TEXT DEFAULT 'domain.tdl',
"emailSmtpPort" INTEGER DEFAULT 25,
"emailSmtpUser" TEXT,
"emailSmtpPassword" TEXT,
"emailSmtpUseTls" BOOLEAN NOT NULL DEFAULT false,
"emailSmtpUseSsl" BOOLEAN NOT NULL DEFAULT false,
"emailBackend" TEXT,
"mailgunApiKey" TEXT,
"sendgridApiKey" TEXT,
"enableOpenUserRegistration" BOOLEAN NOT NULL DEFAULT true,
"serviceId" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "GlitchTip_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_GlitchTip" ("createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUseSsl", "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt") SELECT "createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", coalesce("emailSmtpUseSsl", false) AS "emailSmtpUseSsl", coalesce("emailSmtpUseTls", false) AS "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt" FROM "GlitchTip";
DROP TABLE "GlitchTip";
ALTER TABLE "new_GlitchTip" RENAME TO "GlitchTip";
CREATE UNIQUE INDEX "GlitchTip_serviceId_key" ON "GlitchTip"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "sentryDSN" TEXT;

View File

@@ -0,0 +1,31 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"sentryDSN" TEXT,
"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT coalesce("DNSServers", '1.1.1.1,8.8.8.8') AS "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,33 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"numberOfDockerImagesKeptLocally" INTEGER NOT NULL DEFAULT 3,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"sentryDSN" TEXT,
"previewSeparator" TEXT NOT NULL DEFAULT '.',
"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", 3, "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "gitCommitHash" TEXT;

View File

@@ -0,0 +1,66 @@
/*
Warnings:
- You are about to drop the column `isSystemWide` on the `DockerRegistry` table. All the data in the column will be lost.
*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_DockerRegistry" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"url" TEXT NOT NULL,
"username" TEXT,
"password" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"teamId" TEXT,
CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_DockerRegistry" ("createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username") SELECT "createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username" FROM "DockerRegistry";
DROP TABLE "DockerRegistry";
ALTER TABLE "new_DockerRegistry" RENAME TO "DockerRegistry";
CREATE TABLE "new_Application" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"fqdn" TEXT,
"repository" TEXT,
"configHash" TEXT,
"branch" TEXT,
"buildPack" TEXT,
"projectId" INTEGER,
"port" INTEGER,
"exposePort" INTEGER,
"installCommand" TEXT,
"buildCommand" TEXT,
"startCommand" TEXT,
"baseDirectory" TEXT,
"publishDirectory" TEXT,
"deploymentType" TEXT,
"phpModules" TEXT,
"pythonWSGI" TEXT,
"pythonModule" TEXT,
"pythonVariable" TEXT,
"dockerFileLocation" TEXT,
"denoMainFile" TEXT,
"denoOptions" TEXT,
"dockerComposeFile" TEXT,
"dockerComposeFileLocation" TEXT,
"dockerComposeConfiguration" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"destinationDockerId" TEXT,
"gitSourceId" TEXT,
"gitCommitHash" TEXT,
"baseImage" TEXT,
"baseBuildImage" TEXT,
"dockerRegistryId" TEXT,
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "simpleDockerfile" TEXT;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerRegistryImageName" TEXT;

View File

@@ -19,27 +19,29 @@ model Certificate {
} }
model Setting { model Setting {
id String @id @default(cuid()) id String @id @default(cuid())
fqdn String? @unique fqdn String? @unique
isAPIDebuggingEnabled Boolean? @default(false) dualCerts Boolean @default(false)
isRegistrationEnabled Boolean @default(false) minPort Int @default(9000)
dualCerts Boolean @default(false) maxPort Int @default(9100)
minPort Int @default(9000) DNSServers String @default("1.1.1.1,8.8.8.8")
maxPort Int @default(9100) ipv4 String?
proxyPassword String ipv6 String?
proxyUser String arch String?
proxyHash String? concurrentBuilds Int @default(1)
proxyDefaultRedirect String? applicationStoragePathMigrationFinished Boolean @default(false)
isAutoUpdateEnabled Boolean @default(false) numberOfDockerImagesKeptLocally Int @default(3)
isDNSCheckEnabled Boolean @default(true) proxyDefaultRedirect String?
DNSServers String? doNotTrack Boolean @default(false)
isTraefikUsed Boolean @default(true) sentryDSN String?
createdAt DateTime @default(now()) previewSeparator String @default(".")
updatedAt DateTime @updatedAt isAPIDebuggingEnabled Boolean @default(false)
ipv4 String? isRegistrationEnabled Boolean @default(true)
ipv6 String? isAutoUpdateEnabled Boolean @default(false)
arch String? isDNSCheckEnabled Boolean @default(true)
concurrentBuilds Int @default(1) isTraefikUsed Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
} }
model User { model User {
@@ -82,6 +84,7 @@ model Team {
service Service[] service Service[]
users User[] users User[]
certificate Certificate[] certificate Certificate[]
dockerRegistry DockerRegistry[]
} }
model TeamInvitation { model TeamInvitation {
@@ -95,7 +98,7 @@ model TeamInvitation {
} }
model Application { model Application {
id String @id @default(cuid()) id String @id @default(cuid())
name String name String
fqdn String? fqdn String?
repository String? repository String?
@@ -121,20 +124,26 @@ model Application {
dockerComposeFile String? dockerComposeFile String?
dockerComposeFileLocation String? dockerComposeFileLocation String?
dockerComposeConfiguration String? dockerComposeConfiguration String?
createdAt DateTime @default(now()) createdAt DateTime @default(now())
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt
destinationDockerId String? destinationDockerId String?
gitSourceId String? gitSourceId String?
gitCommitHash String?
baseImage String? baseImage String?
baseBuildImage String? baseBuildImage String?
gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
persistentStorage ApplicationPersistentStorage[]
settings ApplicationSettings? settings ApplicationSettings?
secrets Secret[] dockerRegistryId String?
teams Team[] dockerRegistryImageName String?
connectedDatabase ApplicationConnectedDatabase? simpleDockerfile String?
previewApplication PreviewApplication[]
persistentStorage ApplicationPersistentStorage[]
secrets Secret[]
teams Team[]
connectedDatabase ApplicationConnectedDatabase?
previewApplication PreviewApplication[]
gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
dockerRegistry DockerRegistry? @relation(fields: [dockerRegistryId], references: [id])
} }
model PreviewApplication { model PreviewApplication {
@@ -186,6 +195,7 @@ model ApplicationPersistentStorage {
id String @id @default(cuid()) id String @id @default(cuid())
applicationId String applicationId String
path String path String
oldPath Boolean @default(false)
createdAt DateTime @default(now()) createdAt DateTime @default(now())
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt
application Application @relation(fields: [applicationId], references: [id]) application Application @relation(fields: [applicationId], references: [id])
@@ -294,6 +304,19 @@ model SshKey {
destinationDocker DestinationDocker[] destinationDocker DestinationDocker[]
} }
model DockerRegistry {
id String @id @default(cuid())
name String
url String
username String?
password String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
teamId String?
team Team? @relation(fields: [teamId], references: [id])
application Application[]
}
model GitSource { model GitSource {
id String @id @default(cuid()) id String @id @default(cuid())
name String name String
@@ -624,8 +647,8 @@ model GlitchTip {
emailSmtpPort Int? @default(25) emailSmtpPort Int? @default(25)
emailSmtpUser String? emailSmtpUser String?
emailSmtpPassword String? emailSmtpPassword String?
emailSmtpUseTls Boolean? @default(false) emailSmtpUseTls Boolean @default(false)
emailSmtpUseSsl Boolean? @default(false) emailSmtpUseSsl Boolean @default(false)
emailBackend String? emailBackend String?
mailgunApiKey String? mailgunApiKey String?
sendgridApiKey String? sendgridApiKey String?

View File

@@ -1,18 +1,8 @@
const dotEnvExtended = require('dotenv-extended'); const dotEnvExtended = require('dotenv-extended');
dotEnvExtended.load(); dotEnvExtended.load();
const crypto = require('crypto'); const crypto = require('crypto');
const generator = require('generate-password');
const cuid = require('cuid');
const { PrismaClient } = require('@prisma/client'); const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient(); const prisma = new PrismaClient();
function generatePassword(length = 24) {
return generator.generate({
length,
numbers: true,
strict: true
});
}
const algorithm = 'aes-256-ctr'; const algorithm = 'aes-256-ctr';
async function main() { async function main() {
@@ -21,11 +11,8 @@ async function main() {
if (!settingsFound) { if (!settingsFound) {
await prisma.setting.create({ await prisma.setting.create({
data: { data: {
isRegistrationEnabled: true, id: '0',
proxyPassword: encrypt(generatePassword()),
proxyUser: cuid(),
arch: process.arch, arch: process.arch,
DNSServers: '1.1.1.1,8.8.8.8'
} }
}); });
} else { } else {
@@ -34,11 +21,11 @@ async function main() {
id: settingsFound.id id: settingsFound.id
}, },
data: { data: {
isTraefikUsed: true, id: '0'
proxyHash: null
} }
}); });
} }
// Create local docker engine
const localDocker = await prisma.destinationDocker.findFirst({ const localDocker = await prisma.destinationDocker.findFirst({
where: { engine: '/var/run/docker.sock' } where: { engine: '/var/run/docker.sock' }
}); });
@@ -55,23 +42,18 @@ async function main() {
// Set auto-update based on env variable // Set auto-update based on env variable
const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true'; const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
const settings = await prisma.setting.findFirst({}); await prisma.setting.update({
if (settings) { where: {
await prisma.setting.update({ id: '0'
where: { },
id: settings.id data: {
}, isAutoUpdateEnabled
data: { }
isAutoUpdateEnabled });
} // Create public github source
});
}
const github = await prisma.gitSource.findFirst({ const github = await prisma.gitSource.findFirst({
where: { htmlUrl: 'https://github.com', forPublic: true } where: { htmlUrl: 'https://github.com', forPublic: true }
}); });
const gitlab = await prisma.gitSource.findFirst({
where: { htmlUrl: 'https://gitlab.com', forPublic: true }
});
if (!github) { if (!github) {
await prisma.gitSource.create({ await prisma.gitSource.create({
data: { data: {
@@ -83,6 +65,10 @@ async function main() {
} }
}); });
} }
// Create public gitlab source
const gitlab = await prisma.gitSource.findFirst({
where: { htmlUrl: 'https://gitlab.com', forPublic: true }
});
if (!gitlab) { if (!gitlab) {
await prisma.gitSource.create({ await prisma.gitSource.create({
data: { data: {

View File

@@ -6,39 +6,52 @@ import cookie from '@fastify/cookie';
import multipart from '@fastify/multipart'; import multipart from '@fastify/multipart';
import path, { join } from 'path'; import path, { join } from 'path';
import autoLoad from '@fastify/autoload'; import autoLoad from '@fastify/autoload';
import socketIO from 'fastify-socket.io' import socketIO from 'fastify-socket.io';
import socketIOServer from './realtime' import socketIOServer from './realtime';
import { asyncExecShell, cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, encrypt, executeDockerCmd, executeSSHCmd, generateDatabaseConfiguration, isDev, listSettings, prisma, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common'; import {
cleanupDockerStorage,
createRemoteEngineConfiguration,
decrypt,
executeCommand,
generateDatabaseConfiguration,
isDev,
listSettings,
prisma,
sentryDSN,
startTraefikProxy,
startTraefikTCPProxy,
version
} from './lib/common';
import { scheduler } from './lib/scheduler'; import { scheduler } from './lib/scheduler';
import { compareVersions } from 'compare-versions'; import { compareVersions } from 'compare-versions';
import Graceful from '@ladjs/graceful' import Graceful from '@ladjs/graceful';
import yaml from 'js-yaml' import yaml from 'js-yaml';
import fs from 'fs/promises'; import fs from 'fs/promises';
import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers'; import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers';
import { checkContainer } from './lib/docker'; import { checkContainer } from './lib/docker';
import { migrateServicesToNewTemplate } from './lib'; import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib';
import { refreshTags, refreshTemplates } from './routes/api/v1/handlers'; import { refreshTags, refreshTemplates } from './routes/api/v1/handlers';
import * as Sentry from '@sentry/node';
declare module 'fastify' { declare module 'fastify' {
interface FastifyInstance { interface FastifyInstance {
config: { config: {
COOLIFY_APP_ID: string, COOLIFY_APP_ID: string;
COOLIFY_SECRET_KEY: string, COOLIFY_SECRET_KEY: string;
COOLIFY_DATABASE_URL: string, COOLIFY_DATABASE_URL: string;
COOLIFY_SENTRY_DSN: string, COOLIFY_IS_ON: string;
COOLIFY_IS_ON: string, COOLIFY_WHITE_LABELED: string;
COOLIFY_WHITE_LABELED: string, COOLIFY_WHITE_LABELED_ICON: string | null;
COOLIFY_WHITE_LABELED_ICON: string | null, COOLIFY_AUTO_UPDATE: string;
COOLIFY_AUTO_UPDATE: string,
}; };
} }
} }
const port = isDev ? 3001 : 3000; const port = isDev ? 3001 : 3000;
const host = '0.0.0.0'; const host = '0.0.0.0';
(async () => { (async () => {
const settings = await prisma.setting.findFirst() const settings = await prisma.setting.findFirst();
const fastify = Fastify({ const fastify = Fastify({
logger: settings?.isAPIDebuggingEnabled || false, logger: settings?.isAPIDebuggingEnabled || false,
trustProxy: true trustProxy: true
@@ -49,19 +62,15 @@ const host = '0.0.0.0';
required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'], required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
properties: { properties: {
COOLIFY_APP_ID: { COOLIFY_APP_ID: {
type: 'string', type: 'string'
}, },
COOLIFY_SECRET_KEY: { COOLIFY_SECRET_KEY: {
type: 'string', type: 'string'
}, },
COOLIFY_DATABASE_URL: { COOLIFY_DATABASE_URL: {
type: 'string', type: 'string',
default: 'file:../db/dev.db' default: 'file:../db/dev.db'
}, },
COOLIFY_SENTRY_DSN: {
type: 'string',
default: null
},
COOLIFY_IS_ON: { COOLIFY_IS_ON: {
type: 'string', type: 'string',
default: 'docker' default: 'docker'
@@ -77,8 +86,7 @@ const host = '0.0.0.0';
COOLIFY_AUTO_UPDATE: { COOLIFY_AUTO_UPDATE: {
type: 'string', type: 'string',
default: 'false' default: 'false'
}, }
} }
}; };
const options = { const options = {
@@ -107,14 +115,13 @@ const host = '0.0.0.0';
fastify.register(autoLoad, { fastify.register(autoLoad, {
dir: join(__dirname, 'routes') dir: join(__dirname, 'routes')
}); });
fastify.register(cookie) fastify.register(cookie);
fastify.register(cors); fastify.register(cors);
fastify.register(socketIO, { fastify.register(socketIO, {
cors: { cors: {
origin: isDev ? "*" : '' origin: isDev ? '*' : ''
} }
}) });
// To detect allowed origins // To detect allowed origins
// fastify.addHook('onRequest', async (request, reply) => { // fastify.addHook('onRequest', async (request, reply) => {
// console.log(request.headers.host) // console.log(request.headers.host)
@@ -136,13 +143,13 @@ const host = '0.0.0.0';
// } // }
// }) // })
try { try {
await fastify.listen({ port, host }) await fastify.listen({ port, host });
await socketIOServer(fastify) await socketIOServer(fastify);
console.log(`Coolify's API is listening on ${host}:${port}`); console.log(`Coolify's API is listening on ${host}:${port}`);
migrateServicesToNewTemplate() migrateServicesToNewTemplate();
await migrateApplicationPersistentStorage();
await initServer(); await initServer();
const graceful = new Graceful({ brees: [scheduler] }); const graceful = new Graceful({ brees: [scheduler] });
@@ -152,116 +159,142 @@ const host = '0.0.0.0';
if (!scheduler.workers.has('deployApplication')) { if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication'); scheduler.run('deployApplication');
} }
}, 2000) }, 60000 * 15);
// autoUpdater // autoUpdater
setInterval(async () => { setInterval(async () => {
await autoUpdater() await autoUpdater();
}, 60000 * 15) }, 60000 * 15);
// cleanupStorage // cleanupStorage
setInterval(async () => { setInterval(async () => {
await cleanupStorage() await cleanupStorage();
}, 60000 * 10) }, 60000 * 15);
// checkProxies, checkFluentBit & refresh templates // checkProxies, checkFluentBit & refresh templates
setInterval(async () => { setInterval(async () => {
await checkProxies(); await checkProxies();
await checkFluentBit(); await checkFluentBit();
}, 60000) }, 60000);
// Refresh and check templates // Refresh and check templates
setInterval(async () => { setInterval(async () => {
await refreshTemplates() await refreshTemplates();
}, 60000) }, 60000);
setInterval(async () => { setInterval(async () => {
await refreshTags() await refreshTags();
}, 60000) }, 60000);
setInterval(async () => { setInterval(
await migrateServicesToNewTemplate() async () => {
}, 60000) await migrateServicesToNewTemplate();
},
isDev ? 10000 : 60000
);
setInterval(async () => { setInterval(async () => {
await copySSLCertificates(); await copySSLCertificates();
}, 10000) }, 10000);
await Promise.all([
getTagsTemplates(),
getArch(),
getIPAddress(),
configureRemoteDockers(),
])
await Promise.all([getTagsTemplates(), getArch(), getIPAddress(), configureRemoteDockers()]);
} catch (error) { } catch (error) {
console.error(error); console.error(error);
process.exit(1); process.exit(1);
} }
})(); })();
async function getIPAddress() { async function getIPAddress() {
const { publicIpv4, publicIpv6 } = await import('public-ip') const { publicIpv4, publicIpv6 } = await import('public-ip');
try { try {
const settings = await listSettings(); const settings = await listSettings();
if (!settings.ipv4) { if (!settings.ipv4) {
const ipv4 = await publicIpv4({ timeout: 2000 });
console.log(`Getting public IPv4 address...`); console.log(`Getting public IPv4 address...`);
const ipv4 = await publicIpv4({ timeout: 2000 }) await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } });
await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
} }
if (!settings.ipv6) { if (!settings.ipv6) {
const ipv6 = await publicIpv6({ timeout: 2000 });
console.log(`Getting public IPv6 address...`); console.log(`Getting public IPv6 address...`);
const ipv6 = await publicIpv6({ timeout: 2000 }) await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } });
await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
} }
} catch (error) {}
} catch (error) { }
} }
async function getTagsTemplates() { async function getTagsTemplates() {
const { default: got } = await import('got') const { default: got } = await import('got');
try { try {
if (isDev) { if (isDev) {
const templates = await fs.readFile('./devTemplates.yaml', 'utf8') const templates = await fs.readFile('./devTemplates.yaml', 'utf8');
const tags = await fs.readFile('./devTags.json', 'utf8') const tags = await fs.readFile('./devTags.json', 'utf8');
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates))) await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)));
await fs.writeFile('./tags.json', tags) await fs.writeFile('./tags.json', tags);
console.log('Tags and templates loaded in dev mode...') console.log('[004] Tags and templates loaded in dev mode...');
} else { } else {
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text() const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text();
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text() const response = await got
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response))) .get('https://get.coollabs.io/coolify/service-templates.yaml')
await fs.writeFile('/app/tags.json', tags) .text();
console.log('Tags and templates loaded...') await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)));
await fs.writeFile('/app/tags.json', tags);
console.log('[004] Tags and templates loaded...');
} }
} catch (error) { } catch (error) {
console.log("Couldn't get latest templates.") console.log("Couldn't get latest templates.");
console.log(error) console.log(error);
} }
} }
async function initServer() { async function initServer() {
const appId = process.env['COOLIFY_APP_ID'];
const settings = await prisma.setting.findUnique({ where: { id: '0' } });
try { try {
console.log(`Initializing server...`); if (settings.doNotTrack === true) {
await asyncExecShell(`docker network create --attachable coolify`); console.log('[000] Telemetry disabled...');
} catch (error) { } } else {
if (settings.sentryDSN !== sentryDSN) {
await prisma.setting.update({ where: { id: '0' }, data: { sentryDSN } });
}
// Initialize Sentry
// Sentry.init({
// dsn: sentryDSN,
// environment: isDev ? 'development' : 'production',
// release: version
// });
// console.log('[000] Sentry initialized...')
}
} catch (error) {
console.error(error);
}
try { try {
console.log(`[001] Initializing server...`);
await executeCommand({ command: `docker network create --attachable coolify` });
} catch (error) {}
try {
console.log(`[002] Cleanup stucked builds...`);
const isOlder = compareVersions('3.8.1', version); const isOlder = compareVersions('3.8.1', version);
if (isOlder === 1) { if (isOlder === 1) {
await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } }); await prisma.build.updateMany({
where: { status: { in: ['running', 'queued'] } },
data: { status: 'failed' }
});
} }
} catch (error) { } } catch (error) {}
try {
console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
await fs.rm('/tmp/build-sources', { recursive: true, force: true });
} catch (error) {
console.log(error);
}
} }
async function getArch() { async function getArch() {
try { try {
const settings = await prisma.setting.findFirst({}) const settings = await prisma.setting.findFirst({});
if (settings && !settings.arch) { if (settings && !settings.arch) {
console.log(`Getting architecture...`); console.log(`Getting architecture...`);
await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } }) await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } });
} }
} catch (error) { } } catch (error) {}
} }
async function configureRemoteDockers() { async function configureRemoteDockers() {
@@ -272,41 +305,44 @@ async function configureRemoteDockers() {
if (remoteDocker.length > 0) { if (remoteDocker.length > 0) {
console.log(`Verifying Remote Docker Engines...`); console.log(`Verifying Remote Docker Engines...`);
for (const docker of remoteDocker) { for (const docker of remoteDocker) {
console.log('Verifying:', docker.remoteIpAddress) console.log('Verifying:', docker.remoteIpAddress);
await verifyRemoteDockerEngineFn(docker.id); await verifyRemoteDockerEngineFn(docker.id);
} }
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
} }
async function autoUpdater() { async function autoUpdater() {
try { try {
const { default: got } = await import('got') const { default: got } = await import('got');
const currentVersion = version; const currentVersion = version;
const { coolify } = await got.get('https://get.coollabs.io/versions.json', { const { coolify } = await got
searchParams: { .get('https://get.coollabs.io/versions.json', {
appId: process.env['COOLIFY_APP_ID'] || undefined, searchParams: {
version: currentVersion appId: process.env['COOLIFY_APP_ID'] || undefined,
} version: currentVersion
}).json() }
})
.json();
const latestVersion = coolify.main.version; const latestVersion = coolify.main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion); const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) { if (isUpdateAvailable === 1) {
const activeCount = 0 const activeCount = 0;
if (activeCount === 0) { if (activeCount === 0) {
if (!isDev) { if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst(); const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) { if (isAutoUpdateEnabled) {
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`); await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
await asyncExecShell(`env | grep '^COOLIFY' > .env`); await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
await asyncExecShell( await executeCommand({
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
); });
await asyncExecShell( await executeCommand({
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` shell: true,
); command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
} }
} else { } else {
console.log('Updating (not really in dev mode).'); console.log('Updating (not really in dev mode).');
@@ -314,7 +350,7 @@ async function autoUpdater() {
} }
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
} }
@@ -325,14 +361,18 @@ async function checkFluentBit() {
const { id } = await prisma.destinationDocker.findFirst({ const { id } = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify' } where: { engine, network: 'coolify' }
}); });
const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit', remove: true }); const { found } = await checkContainer({
dockerId: id,
container: 'coolify-fluentbit',
remove: true
});
if (!found) { if (!found) {
await asyncExecShell(`env | grep '^COOLIFY' > .env`); await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
await asyncExecShell(`docker compose up -d fluent-bit`); await executeCommand({ command: `docker compose up -d fluent-bit` });
} }
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
} }
async function checkProxies() { async function checkProxies() {
@@ -348,7 +388,7 @@ async function checkProxies() {
where: { engine, network: 'coolify', isCoolifyProxyUsed: true } where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
}); });
if (localDocker) { if (localDocker) {
portReachable = await isReachable(80, { host: ipv4 || ipv6 }) portReachable = await isReachable(80, { host: ipv4 || ipv6 });
if (!portReachable) { if (!portReachable) {
await startTraefikProxy(localDocker.id); await startTraefikProxy(localDocker.id);
} }
@@ -360,14 +400,14 @@ async function checkProxies() {
if (remoteDocker.length > 0) { if (remoteDocker.length > 0) {
for (const docker of remoteDocker) { for (const docker of remoteDocker) {
if (docker.isCoolifyProxyUsed) { if (docker.isCoolifyProxyUsed) {
portReachable = await isReachable(80, { host: docker.remoteIpAddress }) portReachable = await isReachable(80, { host: docker.remoteIpAddress });
if (!portReachable) { if (!portReachable) {
await startTraefikProxy(docker.id); await startTraefikProxy(docker.id);
} }
} }
try { try {
await createRemoteEngineConfiguration(docker.id) await createRemoteEngineConfiguration(docker.id);
} catch (error) { } } catch (error) {}
} }
} }
// TCP Proxies // TCP Proxies
@@ -406,80 +446,106 @@ async function checkProxies() {
// await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000); // await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
// } // }
// } // }
} catch (error) { } catch (error) {}
}
} }
async function copySSLCertificates() { async function copySSLCertificates() {
try { try {
const pAll = await import('p-all'); const pAll = await import('p-all');
const actions = [] const actions = [];
const certificates = await prisma.certificate.findMany({ include: { team: true } }) const certificates = await prisma.certificate.findMany({ include: { team: true } });
const teamIds = certificates.map(c => c.teamId) const teamIds = certificates.map((c) => c.teamId);
const destinations = await prisma.destinationDocker.findMany({ where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } } }) const destinations = await prisma.destinationDocker.findMany({
where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } }
});
for (const certificate of certificates) { for (const certificate of certificates) {
const { id, key, cert } = certificate const { id, key, cert } = certificate;
const decryptedKey = decrypt(key) const decryptedKey = decrypt(key);
await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey) await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey);
await fs.writeFile(`/tmp/${id}-cert.pem`, cert) await fs.writeFile(`/tmp/${id}-cert.pem`, cert);
for (const destination of destinations) { for (const destination of destinations) {
if (destination.remoteEngine) { if (destination.remoteEngine) {
if (destination.remoteVerified) { if (destination.remoteVerified) {
const { id: dockerId, remoteIpAddress } = destination const { id: dockerId, remoteIpAddress } = destination;
actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress)) actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress));
} }
} else { } else {
actions.push(async () => copyLocalCertificates(id)) actions.push(async () => copyLocalCertificates(id));
} }
} }
} }
await pAll.default(actions, { concurrency: 1 }) await pAll.default(actions, { concurrency: 1 });
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} finally { } finally {
await asyncExecShell(`find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete`) await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` });
} }
} }
async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) { async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
try { try {
await asyncExecShell(`scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`) await executeCommand({
await executeSSHCmd({ dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` }) command: `scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`
await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` }) });
await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` }) await executeCommand({
sshCommand: true,
shell: true,
dockerId,
command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`
});
await executeCommand({
sshCommand: true,
dockerId,
command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`
});
await executeCommand({
sshCommand: true,
dockerId,
command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`
});
} catch (error) { } catch (error) {
console.log({ error }) console.log({ error });
} }
} }
async function copyLocalCertificates(id: string) { async function copyLocalCertificates(id: string) {
try { try {
await asyncExecShell(`docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`) await executeCommand({
await asyncExecShell(`docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`) command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`,
await asyncExecShell(`docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`) shell: true
});
await executeCommand({
command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`
});
await executeCommand({
command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`
});
} catch (error) { } catch (error) {
console.log({ error }) console.log({ error });
} }
} }
async function cleanupStorage() { async function cleanupStorage() {
const destinationDockers = await prisma.destinationDocker.findMany(); const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set() let enginesDone = new Set();
for (const destination of destinationDockers) { for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return;
if (destination.engine) enginesDone.add(destination.engine) if (destination.engine) enginesDone.add(destination.engine);
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress) if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress);
let force = false;
let lowDiskSpace = false; let lowDiskSpace = false;
try { try {
let stdout = null let stdout = null;
if (!isDev) { if (!isDev) {
const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` }) const output = await executeCommand({
dockerId: destination.id,
command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`,
shell: true
});
stdout = output.stdout; stdout = output.stdout;
} else { } else {
const output = await asyncExecShell( const output = await executeCommand({
`df -kPT /` command: `df -kPT /`
); });
stdout = output.stdout; stdout = output.stdout;
} }
let lines = stdout.trim().split('\n'); let lines = stdout.trim().split('\n');
@@ -510,7 +576,7 @@ async function cleanupStorage() {
lowDiskSpace = true; lowDiskSpace = true;
} }
} }
} catch (error) { } } catch (error) {}
await cleanupDockerStorage(destination.id, lowDiskSpace, false) await cleanupDockerStorage(destination.id, lowDiskSpace, force);
} }
} }

View File

@@ -3,8 +3,26 @@ import crypto from 'crypto';
import fs from 'fs/promises'; import fs from 'fs/promises';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common'; import {
import { createDirectories, decrypt, defaultComposeConfiguration, executeDockerCmd, getDomain, prisma, decryptApplication } from '../lib/common'; copyBaseConfigurationFiles,
makeLabelForSimpleDockerfile,
makeLabelForStandaloneApplication,
saveBuildLog,
saveDockerRegistryCredentials,
setDefaultConfiguration
} from '../lib/buildPacks/common';
import {
createDirectories,
decrypt,
defaultComposeConfiguration,
getDomain,
prisma,
decryptApplication,
isDev,
pushToRegistry,
executeCommand,
generateSecrets
} from '../lib/common';
import * as importers from '../lib/importers'; import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks'; import * as buildpacks from '../lib/buildPacks';
@@ -14,80 +32,316 @@ import * as buildpacks from '../lib/buildPacks';
if (message === 'error') throw new Error('oops'); if (message === 'error') throw new Error('oops');
if (message === 'cancel') { if (message === 'cancel') {
parentPort.postMessage('cancelled'); parentPort.postMessage('cancelled');
await prisma.$disconnect() await prisma.$disconnect();
process.exit(0); process.exit(0);
} }
}); });
const pThrottle = await import('p-throttle') const pThrottle = await import('p-throttle');
const throttle = pThrottle.default({ const throttle = pThrottle.default({
limit: 1, limit: 1,
interval: 2000 interval: 2000
}); });
const th = throttle(async () => { const th = throttle(async () => {
try { try {
const queuedBuilds = await prisma.build.findMany({ where: { status: { in: ['queued', 'running'] } }, orderBy: { createdAt: 'asc' } }); const queuedBuilds = await prisma.build.findMany({
const { concurrentBuilds } = await prisma.setting.findFirst({}) where: { status: { in: ['queued', 'running'] } },
orderBy: { createdAt: 'asc' }
});
const { concurrentBuilds } = await prisma.setting.findFirst({});
if (queuedBuilds.length > 0) { if (queuedBuilds.length > 0) {
parentPort.postMessage({ deploying: true }); parentPort.postMessage({ deploying: true });
const concurrency = concurrentBuilds; const concurrency = concurrentBuilds;
const pAll = await import('p-all'); const pAll = await import('p-all');
const actions = [] const actions = [];
for (const queueBuild of queuedBuilds) { for (const queueBuild of queuedBuilds) {
actions.push(async () => { actions.push(async () => {
let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } }) let application = await prisma.application.findUnique({
let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild, sourceRepository = null } = queueBuild where: { id: queueBuild.applicationId },
application = decryptApplication(application) include: {
const originalApplicationId = application.id dockerRegistry: true,
if (pullmergeRequestId) { destinationDocker: true,
const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } }) gitSource: { include: { githubApp: true, gitlabApp: true } },
if (previewApplications.length > 0) { persistentStorage: true,
previewApplicationId = previewApplications[0].id secrets: true,
} settings: true,
} teams: true
const usableApplicationId = previewApplicationId || originalApplicationId
try {
if (queueBuild.status === 'running') {
await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
} }
});
let {
id: buildId,
type,
gitSourceId,
sourceBranch = null,
pullmergeRequestId = null,
previewApplicationId = null,
forceRebuild,
sourceRepository = null
} = queueBuild;
application = decryptApplication(application);
if (!gitSourceId && application.simpleDockerfile) {
const { const {
id: applicationId, id: applicationId,
name,
destinationDocker, destinationDocker,
destinationDockerId, destinationDockerId,
gitSource,
configHash,
fqdn,
projectId,
secrets, secrets,
phpModules,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
} = application
let {
branch,
repository,
buildPack,
port, port,
installCommand, persistentStorage,
buildCommand, exposePort,
startCommand, simpleDockerfile,
baseDirectory, dockerRegistry
publishDirectory, } = application;
dockerFileLocation, const { workdir } = await createDirectories({ repository: applicationId, buildId });
dockerComposeConfiguration, try {
denoMainFile if (queueBuild.status === 'running') {
} = application await saveBuildLog({
line: 'Building halted, restarting...',
buildId,
applicationId: application.id
});
}
const volumes =
persistentStorage?.map((storage) => {
if (storage.oldPath) {
return `${applicationId}${storage.path
.replace(/\//gi, '-')
.replace('-app', '')}:${storage.path}`;
}
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || [];
if (destinationDockerId) {
await prisma.build.update({
where: { id: buildId },
data: { status: 'running' }
});
try {
const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}`
});
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({
dockerId: destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
} catch (error) {
//
}
let envs = [];
if (secrets.length > 0) {
envs = [
...envs,
...generateSecrets(secrets, pullmergeRequestId, false, port)
];
}
await fs.writeFile(`${workdir}/Dockerfile`, simpleDockerfile);
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
await saveDockerRegistryCredentials({ url, username, password, workdir });
}
const labels = makeLabelForSimpleDockerfile({
applicationId,
type,
port: exposePort ? `${exposePort}:${port}` : port
});
try {
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[applicationId]: {
build: {
context: workdir
},
image: `${applicationId}:${buildId}`,
container_name: applicationId,
volumes,
labels,
environment: envs,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(destinationDocker.network)
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeCommand({
debug: true,
dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} up -d`
});
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
}
} catch (error) {
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
if (error instanceof Error) {
await saveBuildLog({
line: error.message,
buildId,
applicationId: application.id
});
}
await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
if (application.dockerRegistryImageName) {
const customTag = application.dockerRegistryImageName.split(':')[1] || buildId;
const imageName = application.dockerRegistryImageName.split(':')[0];
await saveBuildLog({
line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
buildId,
applicationId: application.id
});
await pushToRegistry(application, workdir, buildId, imageName, customTag);
await saveBuildLog({ line: 'Success', buildId, applicationId: application.id });
}
} catch (error) {
if (error.stdout) {
await saveBuildLog({ line: error.stdout, buildId, applicationId });
}
if (error.stderr) {
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}
} finally {
await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({
where: { id: buildId },
data: { status: 'success' }
});
}
return;
}
const originalApplicationId = application.id;
const {
id: applicationId,
name,
destinationDocker,
destinationDockerId,
gitSource,
configHash,
fqdn,
projectId,
secrets,
phpModules,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
gitCommitHash,
dockerRegistry
} = application;
let {
branch,
repository,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
dockerComposeFileLocation,
dockerComposeConfiguration,
denoMainFile
} = application;
let imageId = applicationId;
let domain = getDomain(fqdn);
let location = null;
let tag = null;
let customTag = null;
let imageName = null;
let imageFoundLocally = false;
let imageFoundRemotely = false;
if (pullmergeRequestId) {
const previewApplications = await prisma.previewApplication.findMany({
where: { applicationId: originalApplicationId, pullmergeRequestId }
});
if (previewApplications.length > 0) {
previewApplicationId = previewApplications[0].id;
}
// Previews, we need to get the source branch and set subdomain
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
repository = sourceRepository || repository;
}
const { workdir, repodir } = await createDirectories({ repository, buildId });
try {
if (queueBuild.status === 'running') {
await saveBuildLog({
line: 'Building halted, restarting...',
buildId,
applicationId: application.id
});
}
const currentHash = crypto const currentHash = crypto
.createHash('sha256') .createHash('sha256')
.update( .update(
@@ -113,24 +367,26 @@ import * as buildpacks from '../lib/buildPacks';
) )
.digest('hex'); .digest('hex');
const { debug } = settings; const { debug } = settings;
let imageId = applicationId; if (!debug) {
let domain = getDomain(fqdn); await saveBuildLog({
line: `Debug logging is disabled. Enable it above if necessary!`,
buildId,
applicationId
});
}
const volumes = const volumes =
persistentStorage?.map((storage) => { persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : '' if (storage.oldPath) {
}${storage.path}`; return `${applicationId}${storage.path
.replace(/\//gi, '-')
.replace('-app', '')}:${storage.path}`;
}
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || []; }) || [];
// Previews, we need to get the source branch and set subdomain
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
repository = sourceRepository || repository;
}
try { try {
dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration) dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration);
} catch (error) { } } catch (error) {}
let deployNeeded = true; let deployNeeded = true;
let destinationType; let destinationType;
@@ -138,8 +394,11 @@ import * as buildpacks from '../lib/buildPacks';
destinationType = 'docker'; destinationType = 'docker';
} }
if (destinationType === 'docker') { if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } }); await prisma.build.update({
const { workdir, repodir } = await createDirectories({ repository, buildId }); where: { id: buildId },
data: { status: 'running' }
});
const configuration = await setDefaultConfiguration(application); const configuration = await setDefaultConfiguration(application);
buildPack = configuration.buildPack; buildPack = configuration.buildPack;
@@ -150,6 +409,7 @@ import * as buildpacks from '../lib/buildPacks';
publishDirectory = configuration.publishDirectory; publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory || ''; baseDirectory = configuration.baseDirectory || '';
dockerFileLocation = configuration.dockerFileLocation; dockerFileLocation = configuration.dockerFileLocation;
dockerComposeFileLocation = configuration.dockerComposeFileLocation;
denoMainFile = configuration.denoMainFile; denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({ const commit = await importers[gitSource.type]({
applicationId, applicationId,
@@ -159,6 +419,8 @@ import * as buildpacks from '../lib/buildPacks';
githubAppId: gitSource.githubApp?.id, githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id, gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort, customPort: gitSource.customPort,
gitCommitHash,
configuration,
repository, repository,
branch, branch,
buildId, buildId,
@@ -172,20 +434,35 @@ import * as buildpacks from '../lib/buildPacks';
if (!commit) { if (!commit) {
throw new Error('No commit found?'); throw new Error('No commit found?');
} }
let tag = commit.slice(0, 7); tag = commit.slice(0, 7);
if (pullmergeRequestId) { if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`; tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
} }
if (application.dockerRegistryImageName) {
imageName = application.dockerRegistryImageName.split(':')[0];
customTag = application.dockerRegistryImageName.split(':')[1] || tag;
} else {
customTag = tag;
imageName = applicationId;
}
if (pullmergeRequestId) {
customTag = `${customTag}-${pullmergeRequestId}`;
}
try { try {
await prisma.build.update({ where: { id: buildId }, data: { commit } }); await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) { } } catch (err) {}
if (!pullmergeRequestId) { if (!pullmergeRequestId) {
if (configHash !== currentHash) { if (configHash !== currentHash) {
deployNeeded = true; deployNeeded = true;
if (configHash) { if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId }); await saveBuildLog({
line: 'Configuration changed',
buildId,
applicationId
});
} }
} else { } else {
deployNeeded = false; deployNeeded = false;
@@ -194,17 +471,47 @@ import * as buildpacks from '../lib/buildPacks';
deployNeeded = true; deployNeeded = true;
} }
let imageFound = false;
try { try {
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}` command: `docker image inspect ${applicationId}:${tag}`
}) });
imageFound = true; imageFoundLocally = true;
} catch (error) { } catch (error) {
// //
} }
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage); if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
location = await saveDockerRegistryCredentials({
url,
username,
password,
workdir
});
}
try {
await executeCommand({
dockerId: destinationDocker.id,
command: `docker ${
location ? `--config ${location}` : ''
} pull ${imageName}:${customTag}`
});
imageFoundRemotely = true;
} catch (error) {
//
}
let imageFound = `${applicationId}:${tag}`;
if (imageFoundRemotely) {
imageFound = `${imageName}:${customTag}`;
}
await copyBaseConfigurationFiles(
buildPack,
workdir,
buildId,
applicationId,
baseImage
);
const labels = makeLabelForStandaloneApplication({ const labels = makeLabelForStandaloneApplication({
applicationId, applicationId,
fqdn, fqdn,
@@ -223,8 +530,8 @@ import * as buildpacks from '../lib/buildPacks';
baseDirectory, baseDirectory,
publishDirectory publishDirectory
}); });
if (forceRebuild) deployNeeded = true if (forceRebuild) deployNeeded = true;
if (!imageFound || deployNeeded) { if ((!imageFoundLocally && !imageFoundRemotely) || deployNeeded) {
if (buildpacks[buildPack]) if (buildpacks[buildPack])
await buildpacks[buildPack]({ await buildpacks[buildPack]({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
@@ -258,45 +565,84 @@ import * as buildpacks from '../lib/buildPacks';
pythonVariable, pythonVariable,
dockerFileLocation, dockerFileLocation,
dockerComposeConfiguration, dockerComposeConfiguration,
dockerComposeFileLocation,
denoMainFile, denoMainFile,
denoOptions, denoOptions,
baseImage, baseImage,
baseBuildImage, baseBuildImage,
deploymentType, deploymentType,
forceRebuild
}); });
else { else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId }); await saveBuildLog({
line: `Build pack ${buildPack} not found`,
buildId,
applicationId
});
throw new Error(`Build pack ${buildPack} not found.`); throw new Error(`Build pack ${buildPack} not found.`);
} }
} else { } else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId }); if (imageFoundRemotely || deployNeeded) {
await saveBuildLog({
line: `Container image ${imageFound} found in Docker Registry - reuising it`,
buildId,
applicationId
});
} else {
if (imageFoundLocally || deployNeeded) {
await saveBuildLog({
line: `Container image ${imageFound} found locally - reuising it`,
buildId,
applicationId
});
}
}
} }
if (buildPack === 'compose') { if (buildPack === 'compose') {
try { try {
await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId, dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0` command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}`
}) });
await executeDockerCmd({ if (containers) {
dockerId: destinationDockerId, const containerArray = containers.split('\n');
command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force` if (containerArray.length > 0) {
}) for (const container of containerArray) {
await executeCommand({
dockerId: destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
} catch (error) { } catch (error) {
// //
} }
try { try {
await executeDockerCmd({ debug, buildId, applicationId, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` }) await executeCommand({
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId }); debug,
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId }); buildId,
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } }); applicationId,
dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} up -d`
});
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
await prisma.build.update({
where: { id: buildId },
data: { status: 'success' }
});
await prisma.application.update({ await prisma.application.update({
where: { id: applicationId }, where: { id: applicationId },
data: { configHash: currentHash } data: { configHash: currentHash }
}); });
} catch (error) { } catch (error) {
await saveBuildLog({ line: error, buildId, applicationId }); await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } }) const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) { if (foundBuild) {
await prisma.build.update({ await prisma.build.update({
where: { id: buildId }, where: { id: buildId },
@@ -307,49 +653,44 @@ import * as buildpacks from '../lib/buildPacks';
} }
throw new Error(error); throw new Error(error);
} }
} else { } else {
try { try {
await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId, dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0` command: `docker ps -a --filter 'label=com.docker.compose.service=${
}) pullmergeRequestId ? imageId : applicationId
await executeDockerCmd({ }' --format {{.ID}}`
dockerId: destinationDockerId, });
command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force` if (containers) {
}) const containerArray = containers.split('\n');
} catch (error) { if (containerArray.length > 0) {
// for (const container of containerArray) {
} await executeCommand({
const envs = [ dockerId: destinationDockerId,
`PORT=${port}` command: `docker stop -t 0 ${container}`
]; });
if (secrets.length > 0) { await executeCommand({
secrets.forEach((secret) => { dockerId: destinationDockerId,
if (pullmergeRequestId) { command: `docker rm --force ${container}`
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret) });
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
} }
} }
}); }
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) { } catch (error) {
// //
} }
let envs = [];
if (secrets.length > 0) {
envs = [
...envs,
...generateSecrets(secrets, pullmergeRequestId, false, port)
];
}
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
await saveDockerRegistryCredentials({ url, username, password, workdir });
}
try { try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => { const composeVolumes = volumes.map((volume) => {
return { return {
[`${volume.split(':')[0]}`]: { [`${volume.split(':')[0]}`]: {
@@ -361,15 +702,15 @@ import * as buildpacks from '../lib/buildPacks';
version: '3.8', version: '3.8',
services: { services: {
[imageId]: { [imageId]: {
image: `${applicationId}:${tag}`, image: imageFound,
container_name: imageId, container_name: imageId,
volumes, volumes,
env_file: envFound ? [`${workdir}/.env`] : [], environment: envs,
labels, labels,
depends_on: [], depends_on: [],
expose: [port], expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}), ...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(destinationDocker.network), ...defaultComposeConfiguration(destinationDocker.network)
} }
}, },
networks: { networks: {
@@ -380,11 +721,15 @@ import * as buildpacks from '../lib/buildPacks';
volumes: Object.assign({}, ...composeVolumes) volumes: Object.assign({}, ...composeVolumes)
}; };
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile)); await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` }) await executeCommand({
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId }); debug,
dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} up -d`
});
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) { } catch (error) {
await saveBuildLog({ line: error, buildId, applicationId }); await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } }) const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) { if (foundBuild) {
await prisma.build.update({ await prisma.build.update({
where: { id: buildId }, where: { id: buildId },
@@ -395,17 +740,16 @@ import * as buildpacks from '../lib/buildPacks';
} }
throw new Error(error); throw new Error(error);
} }
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } }); if (!pullmergeRequestId)
if (!pullmergeRequestId) await prisma.application.update({ await prisma.application.update({
where: { id: applicationId }, where: { id: applicationId },
data: { configHash: currentHash } data: { configHash: currentHash }
}); });
} }
} }
} } catch (error) {
catch (error) { const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) { if (foundBuild) {
await prisma.build.update({ await prisma.build.update({
where: { id: buildId }, where: { id: buildId },
@@ -417,17 +761,47 @@ import * as buildpacks from '../lib/buildPacks';
if (error !== 1) { if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id }); await saveBuildLog({ line: error, buildId, applicationId: application.id });
} }
if (error instanceof Error) {
await saveBuildLog({
line: error.message,
buildId,
applicationId: application.id
});
}
await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
if (application.dockerRegistryImageName && (!imageFoundRemotely || forceRebuild)) {
await saveBuildLog({
line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
buildId,
applicationId: application.id
});
await pushToRegistry(application, workdir, tag, imageName, customTag);
await saveBuildLog({ line: 'Success', buildId, applicationId: application.id });
}
} catch (error) {
if (error.stdout) {
await saveBuildLog({ line: error.stdout, buildId, applicationId });
}
if (error.stderr) {
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}
} finally {
await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
} }
}); });
} }
await pAll.default(actions, { concurrency }) await pAll.default(actions, { concurrency });
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
}) });
while (true) { while (true) {
await th() await th();
} }
} else process.exit(0); } else process.exit(0);
})(); })();

View File

@@ -1,7 +1,33 @@
import cuid from "cuid"; import cuid from "cuid";
import { decrypt, encrypt, fixType, generatePassword, prisma } from "./lib/common"; import { decrypt, encrypt, fixType, generatePassword, generateToken, prisma } from "./lib/common";
import { getTemplates } from "./lib/services"; import { getTemplates } from "./lib/services";
export async function migrateApplicationPersistentStorage() {
const settings = await prisma.setting.findFirst()
if (settings) {
const { id: settingsId, applicationStoragePathMigrationFinished } = settings
try {
if (!applicationStoragePathMigrationFinished) {
const applications = await prisma.application.findMany({ include: { persistentStorage: true } });
for (const application of applications) {
if (application.persistentStorage && application.persistentStorage.length > 0 && application?.buildPack !== 'docker') {
for (const storage of application.persistentStorage) {
let { id, path } = storage
if (!path.startsWith('/app')) {
path = `/app${path}`
await prisma.applicationPersistentStorage.update({ where: { id }, data: { path, oldPath: true } })
}
}
}
}
}
} catch (error) {
console.log(error)
} finally {
await prisma.setting.update({ where: { id: settingsId }, data: { applicationStoragePathMigrationFinished: true } })
}
}
}
export async function migrateServicesToNewTemplate() { export async function migrateServicesToNewTemplate() {
// This function migrates old hardcoded services to the new template based services // This function migrates old hardcoded services to the new template based services
try { try {
@@ -57,39 +83,42 @@ export async function migrateServicesToNewTemplate() {
} catch (error) { } catch (error) {
console.log(error) console.log(error)
} }
if (template.variables) {
if (template.variables.length > 0) { if (template.variables.length > 0) {
for (const variable of template.variables) {
const { defaultValue } = variable;
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec(defaultValue)?.[1]) || undefined
if (variable.defaultValue.startsWith('$$generate_password')) {
variable.value = generatePassword({ length });
} else if (variable.defaultValue.startsWith('$$generate_hex')) {
variable.value = generatePassword({ length, isHex: true });
} else if (variable.defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else if (variable.defaultValue.startsWith('$$generate_token')) {
variable.value = generateToken()
} else {
variable.value = variable.defaultValue || '';
}
}
}
for (const variable of template.variables) { for (const variable of template.variables) {
const { defaultValue } = variable; if (variable.id.startsWith('$$secret_')) {
const regex = /^\$\$.*\((\d+)\)$/g; const found = await prisma.serviceSecret.findFirst({ where: { name: variable.name, serviceId: id } })
const length = Number(regex.exec(defaultValue)?.[1]) || undefined if (!found) {
if (variable.defaultValue.startsWith('$$generate_password')) { await prisma.serviceSecret.create({
variable.value = generatePassword({ length }); data: { name: variable.name, value: encrypt(variable.value) || '', service: { connect: { id } } }
} else if (variable.defaultValue.startsWith('$$generate_hex')) { })
variable.value = generatePassword({ length, isHex: true }); }
} else if (variable.defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else {
variable.value = variable.defaultValue || '';
}
}
}
for (const variable of template.variables) {
if (variable.id.startsWith('$$secret_')) {
const found = await prisma.serviceSecret.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) {
await prisma.serviceSecret.create({
data: { name: variable.name, value: encrypt(variable.value) || '', service: { connect: { id } } }
})
}
} }
if (variable.id.startsWith('$$config_')) { if (variable.id.startsWith('$$config_')) {
const found = await prisma.serviceSetting.findFirst({ where: { name: variable.name, serviceId: id } }) const found = await prisma.serviceSetting.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) { if (!found) {
await prisma.serviceSetting.create({ await prisma.serviceSetting.create({
data: { name: variable.name, value: variable.value.toString(), variableName: variable.id, service: { connect: { id } } } data: { name: variable.name, value: variable.value.toString(), variableName: variable.id, service: { connect: { id } } }
}) })
}
} }
} }
} }
@@ -221,7 +250,7 @@ async function hasura(service: any, template: any) {
const { id } = service const { id } = service
const secrets = [ const secrets = [
`HASURA_GRAPHQL_ADMIN_PASSWORD@@@${graphQLAdminPassword}`, `HASURA_GRAPHQL_ADMIN_SECRET@@@${graphQLAdminPassword}`,
`HASURA_GRAPHQL_METADATA_DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`, `HASURA_GRAPHQL_METADATA_DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`, `POSTGRES_PASSWORD@@@${postgresqlPassword}`,
] ]
@@ -238,7 +267,6 @@ async function hasura(service: any, template: any) {
async function umami(service: any, template: any) { async function umami(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, umamiAdminPassword, hashSalt } = service.umami const { postgresqlUser, postgresqlPassword, postgresqlDatabase, umamiAdminPassword, hashSalt } = service.umami
const { id } = service const { id } = service
const secrets = [ const secrets = [
`HASH_SALT@@@${hashSalt}`, `HASH_SALT@@@${hashSalt}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`, `POSTGRES_PASSWORD@@@${postgresqlPassword}`,
@@ -439,7 +467,6 @@ async function plausibleAnalytics(service: any, template: any) {
// Disconnect old service data // Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } }) // await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } })
} }
async function migrateSettings(settings: any[], service: any, template: any) { async function migrateSettings(settings: any[], service: any, template: any) {
for (const setting of settings) { for (const setting of settings) {
try { try {
@@ -457,9 +484,9 @@ async function migrateSettings(settings: any[], service: any, template: any) {
variableName = `$$config_${name.toLowerCase()}` variableName = `$$config_${name.toLowerCase()}`
} }
// console.log('Migrating setting', name, value, 'for service', service.id, ', service name:', service.name, 'variableName: ', variableName) // console.log('Migrating setting', name, value, 'for service', service.id, ', service name:', service.name, 'variableName: ', variableName)
await prisma.serviceSetting.findFirst({ where: { name: minio, serviceId: service.id } }) || await prisma.serviceSetting.create({ data: { name: minio, value, variableName, service: { connect: { id: service.id } } } }) await prisma.serviceSetting.findFirst({ where: { name: minio, serviceId: service.id } }) || await prisma.serviceSetting.create({ data: { name: minio, value, variableName, service: { connect: { id: service.id } } } })
} catch(error) { } catch (error) {
console.log(error) console.log(error)
} }
} }
@@ -474,7 +501,7 @@ async function migrateSecrets(secrets: any[], service: any) {
} }
// console.log('Migrating secret', name, value, 'for service', service.id, ', service name:', service.name) // console.log('Migrating secret', name, value, 'for service', service.id, ', service name:', service.name)
await prisma.serviceSecret.findFirst({ where: { name, serviceId: service.id } }) || await prisma.serviceSecret.create({ data: { name, value, service: { connect: { id: service.id } } } }) await prisma.serviceSecret.findFirst({ where: { name, serviceId: service.id } }) || await prisma.serviceSecret.create({ data: { name, value, service: { connect: { id: service.id } } } })
} catch(error) { } catch (error) {
console.log(error) console.log(error)
} }
} }
@@ -500,4 +527,4 @@ async function createVolumes(service: any, template: any) {
// console.log('Creating volume', volumeName, path, containerId, 'for service', service.id, ', service name:', service.name) // console.log('Creating volume', volumeName, path, containerId, 'for service', service.id, ', service name:', service.name)
await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: service.id } }) || await prisma.servicePersistentStorage.create({ data: { volumeName, path, containerId, predefined: true, service: { connect: { id: service.id } } } }) await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: service.id } }) || await prisma.servicePersistentStorage.create({ data: { volumeName, path, containerId, predefined: true, service: { connect: { id: service.id } } } })
} }
} }

View File

@@ -1,6 +1,18 @@
import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common"; import {
base64Encode,
decrypt,
encrypt,
executeCommand,
generateSecrets,
generateTimestamp,
getDomain,
isARM,
isDev,
prisma,
version
} from '../common';
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { day } from "../dayjs"; import { day } from '../dayjs';
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy']; const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
const nodeBased = [ const nodeBased = [
@@ -17,7 +29,10 @@ const nodeBased = [
'nextjs' 'nextjs'
]; ];
export function setDefaultBaseImage(buildPack: string | null, deploymentType: string | null = null) { export function setDefaultBaseImage(
buildPack: string | null,
deploymentType: string | null = null
) {
const nodeVersions = [ const nodeVersions = [
{ {
value: 'node:lts', value: 'node:lts',
@@ -52,6 +67,14 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
{ {
value: 'webdevops/apache:alpine', value: 'webdevops/apache:alpine',
label: 'webdevops/apache:alpine' label: 'webdevops/apache:alpine'
},
{
value: 'nginx:alpine',
label: 'nginx:alpine'
},
{
value: 'httpd:alpine',
label: 'httpd:alpine (Apache)'
} }
]; ];
const rustVersions = [ const rustVersions = [
@@ -214,8 +237,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
label: 'webdevops/php-apache:7.1-alpine' label: 'webdevops/php-apache:7.1-alpine'
}, },
{ {
value: 'webdevops/php-nginx:7.1-alpine', value: 'php:8.1-fpm',
label: 'webdevops/php-nginx:7.1-alpine' label: 'php:8.1-fpm'
},
{
value: 'php:8.0-fpm',
label: 'php:8.0-fpm'
},
{
value: 'php:8.1-fpm-alpine',
label: 'php:8.1-fpm-alpine'
},
{
value: 'php:8.0-fpm-alpine',
label: 'php:8.0-fpm-alpine'
} }
]; ];
const pythonVersions = [ const pythonVersions = [
@@ -296,8 +331,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
{ {
value: 'heroku/builder-classic:22', value: 'heroku/builder-classic:22',
label: 'heroku/builder-classic:22' label: 'heroku/builder-classic:22'
}, }
] ];
let payload: any = { let payload: any = {
baseImage: null, baseImage: null,
baseBuildImage: null, baseBuildImage: null,
@@ -306,8 +341,10 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
}; };
if (nodeBased.includes(buildPack)) { if (nodeBased.includes(buildPack)) {
if (deploymentType === 'static') { if (deploymentType === 'static') {
payload.baseImage = 'webdevops/nginx:alpine'; payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
payload.baseImages = staticVersions; payload.baseImages = isARM(process.arch)
? staticVersions.filter((version) => !version.value.includes('webdevops'))
: staticVersions;
payload.baseBuildImage = 'node:lts'; payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions; payload.baseBuildImages = nodeVersions;
} else { } else {
@@ -318,8 +355,10 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
} }
} }
if (staticApps.includes(buildPack)) { if (staticApps.includes(buildPack)) {
payload.baseImage = 'webdevops/nginx:alpine'; payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
payload.baseImages = staticVersions; payload.baseImages = isARM(process.arch)
? staticVersions.filter((version) => !version.value.includes('webdevops'))
: staticVersions;
payload.baseBuildImage = 'node:lts'; payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions; payload.baseBuildImages = nodeVersions;
} }
@@ -337,12 +376,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
payload.baseImage = 'denoland/deno:latest'; payload.baseImage = 'denoland/deno:latest';
} }
if (buildPack === 'php') { if (buildPack === 'php') {
payload.baseImage = 'webdevops/php-apache:8.2-alpine'; payload.baseImage = isARM(process.arch)
payload.baseImages = phpVersions; ? 'php:8.1-fpm-alpine'
: 'webdevops/php-apache:8.2-alpine';
payload.baseImages = isARM(process.arch)
? phpVersions.filter((version) => !version.value.includes('webdevops'))
: phpVersions;
} }
if (buildPack === 'laravel') { if (buildPack === 'laravel') {
payload.baseImage = 'webdevops/php-apache:8.2-alpine'; payload.baseImage = isARM(process.arch)
payload.baseImages = phpVersions; ? 'php:8.1-fpm-alpine'
: 'webdevops/php-apache:8.2-alpine';
payload.baseImages = isARM(process.arch)
? phpVersions.filter((version) => !version.value.includes('webdevops'))
: phpVersions;
payload.baseBuildImage = 'node:18'; payload.baseBuildImage = 'node:18';
payload.baseBuildImages = nodeVersions; payload.baseBuildImages = nodeVersions;
} }
@@ -363,6 +410,7 @@ export const setDefaultConfiguration = async (data: any) => {
publishDirectory, publishDirectory,
baseDirectory, baseDirectory,
dockerFileLocation, dockerFileLocation,
dockerComposeFileLocation,
denoMainFile denoMainFile
} = data; } = data;
//@ts-ignore //@ts-ignore
@@ -384,7 +432,8 @@ export const setDefaultConfiguration = async (data: any) => {
if (!publishDirectory) publishDirectory = template?.publishDirectory || null; if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (baseDirectory) { if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`; if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (baseDirectory.endsWith('/') && baseDirectory !== '/') baseDirectory = baseDirectory.slice(0, -1); if (baseDirectory.endsWith('/') && baseDirectory !== '/')
baseDirectory = baseDirectory.slice(0, -1);
} }
if (dockerFileLocation) { if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`; if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
@@ -392,6 +441,14 @@ export const setDefaultConfiguration = async (data: any) => {
} else { } else {
dockerFileLocation = '/Dockerfile'; dockerFileLocation = '/Dockerfile';
} }
if (dockerComposeFileLocation) {
if (!dockerComposeFileLocation.startsWith('/'))
dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
if (dockerComposeFileLocation.endsWith('/'))
dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
} else {
dockerComposeFileLocation = '/Dockerfile';
}
if (!denoMainFile) { if (!denoMainFile) {
denoMainFile = 'main.ts'; denoMainFile = 'main.ts';
} }
@@ -405,6 +462,7 @@ export const setDefaultConfiguration = async (data: any) => {
publishDirectory, publishDirectory,
baseDirectory, baseDirectory,
dockerFileLocation, dockerFileLocation,
dockerComposeFileLocation,
denoMainFile denoMainFile
}; };
}; };
@@ -451,7 +509,6 @@ export const scanningTemplates = {
} }
}; };
export const saveBuildLog = async ({ export const saveBuildLog = async ({
line, line,
buildId, buildId,
@@ -461,14 +518,26 @@ export const saveBuildLog = async ({
buildId: string; buildId: string;
applicationId: string; applicationId: string;
}): Promise<any> => { }): Promise<any> => {
const { default: got } = await import('got') if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
const { default: got } = await import('got');
if (typeof line === 'object' && line) {
if (line.shortMessage) {
line = line.shortMessage + '\n' + line.stderr;
} else {
line = JSON.stringify(line);
}
}
if (line && typeof line === 'string' && line.includes('ghs_')) { if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g; const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@'); line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
} }
const addTimestamp = `[${generateTimestamp()}] ${line}`; const addTimestamp = `[${generateTimestamp()}] ${line}`;
const fluentBitUrl = isDev ? process.env.COOLIFY_CONTAINER_DEV === 'true' ? 'http://coolify-fluentbit:24224' : 'http://localhost:24224' : 'http://coolify-fluentbit:24224'; const fluentBitUrl = isDev
? process.env.COOLIFY_CONTAINER_DEV === 'true'
? 'http://coolify-fluentbit:24224'
: 'http://localhost:24224'
: 'http://coolify-fluentbit:24224';
if (isDev && !process.env.COOLIFY_CONTAINER_DEV) { if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
console.debug(`[${applicationId}] ${addTimestamp}`); console.debug(`[${applicationId}] ${addTimestamp}`);
@@ -478,15 +547,17 @@ export const saveBuildLog = async ({
json: { json: {
line: encrypt(line) line: encrypt(line)
} }
}) });
} catch (error) { } catch (error) {
return await prisma.buildLog.create({ return await prisma.buildLog.create({
data: { data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId line: addTimestamp,
buildId,
time: Number(day().valueOf()),
applicationId
} }
}); });
} }
}; };
export async function copyBaseConfigurationFiles( export async function copyBaseConfigurationFiles(
@@ -558,6 +629,7 @@ export async function copyBaseConfigurationFiles(
` `
); );
} }
// TODO: Add more configuration files for other buildpacks, like apache2, etc.
} catch (error) { } catch (error) {
throw new Error(error); throw new Error(error);
} }
@@ -571,6 +643,29 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
); );
} }
export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
if (!username || !password) {
return null;
}
let decryptedPassword = decrypt(password);
const location = `${workdir}/.docker`;
try {
await fs.mkdir(`${workdir}/.docker`);
} catch (error) {
// console.log(error);
}
const payload = JSON.stringify({
auths: {
[url]: {
auth: Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
}
}
});
await fs.writeFile(`${location}/config.json`, payload);
return location;
}
export async function buildImage({ export async function buildImage({
applicationId, applicationId,
tag, tag,
@@ -580,36 +675,51 @@ export async function buildImage({
isCache = false, isCache = false,
debug = false, debug = false,
dockerFileLocation = '/Dockerfile', dockerFileLocation = '/Dockerfile',
commit commit,
forceRebuild = false
}) { }) {
if (isCache) { if (isCache) {
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId }); await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
} else { } else {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId }); await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
} }
if (!debug) { const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`;
await saveBuildLog({ const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`;
line: `Debug turned off. To see more details, allow it in the features tab.`, let location = null;
buildId,
applicationId
});
}
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })
const { status } = await prisma.build.findUnique({ where: { id: buildId } }) const { dockerRegistry } = await prisma.application.findUnique({
if (status === 'canceled') { where: { id: applicationId },
throw new Error('Deployment canceled.') select: { dockerRegistry: true }
});
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
location = await saveDockerRegistryCredentials({ url, username, password, workdir });
} }
if (isCache) {
await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId }); await executeCommand({
} else { stream: true,
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId }); debug,
buildId,
applicationId,
dockerId,
command: `docker ${location ? `--config ${location}` : ''} build ${
forceRebuild ? '--no-cache' : ''
} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
});
const { status } = await prisma.build.findUnique({ where: { id: buildId } });
if (status === 'canceled') {
throw new Error('Canceled.');
} }
} }
export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.applicationId=${applicationId}`,
`coolify.type=standalone-application`
];
}
export function makeLabelForStandaloneApplication({ export function makeLabelForStandaloneApplication({
applicationId, applicationId,
fqdn, fqdn,
@@ -638,6 +748,7 @@ export function makeLabelForStandaloneApplication({
`coolify.version=${version}`, `coolify.version=${version}`,
`coolify.applicationId=${applicationId}`, `coolify.applicationId=${applicationId}`,
`coolify.type=standalone-application`, `coolify.type=standalone-application`,
`coolify.name=${name}`,
`coolify.configuration=${base64Encode( `coolify.configuration=${base64Encode(
JSON.stringify({ JSON.stringify({
applicationId, applicationId,
@@ -677,21 +788,8 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (isPnpm) { if (isPnpm) {
@@ -701,7 +799,6 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
if (installCommand) { if (installCommand) {
Dockerfile.push(`RUN ${installCommand}`); Dockerfile.push(`RUN ${installCommand}`);
} }
// Dockerfile.push(`ARG CACHEBUST=1`);
Dockerfile.push(`RUN ${buildCommand}`); Dockerfile.push(`RUN ${buildCommand}`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n')); await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true }); await buildImage({ ...data, isCache: true });
@@ -709,27 +806,13 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
export async function buildCacheImageForLaravel(data, imageForBuild) { export async function buildCacheImageForLaravel(data, imageForBuild) {
const { workdir, buildId, secrets, pullmergeRequestId } = data; const { workdir, buildId, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = []; const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`); Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
Dockerfile.push(`COPY *.json *.mix.js /app/`); Dockerfile.push(`COPY *.json *.mix.js /app/`);
@@ -740,11 +823,7 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
} }
export async function buildCacheImageWithCargo(data, imageForBuild) { export async function buildCacheImageWithCargo(data, imageForBuild) {
const { const { applicationId, workdir, buildId } = data;
applicationId,
workdir,
buildId,
} = data;
const Dockerfile: Array<string> = []; const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`); Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);

View File

@@ -1,100 +1,109 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { defaultComposeConfiguration, executeDockerCmd } from '../common'; import { defaultComposeConfiguration, executeCommand, generateSecrets } from '../common';
import { buildImage, saveBuildLog } from './common'; import { saveBuildLog } from './common';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
export default async function (data) { export default async function (data) {
let { let {
applicationId, applicationId,
debug, debug,
buildId, buildId,
dockerId, dockerId,
network, network,
volumes, volumes,
labels, labels,
workdir, workdir,
baseDirectory, baseDirectory,
secrets, secrets,
pullmergeRequestId, pullmergeRequestId,
port, dockerComposeConfiguration,
dockerComposeConfiguration dockerComposeFileLocation
} = data } = data;
const fileYml = `${workdir}${baseDirectory}/docker-compose.yml`; const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
const fileYaml = `${workdir}${baseDirectory}/docker-compose.yaml`; const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
let dockerComposeRaw = null; const dockerComposeYaml = yaml.load(dockerComposeRaw);
let isYml = false; if (!dockerComposeYaml.services) {
try { throw 'No Services found in docker-compose file.';
dockerComposeRaw = await fs.readFile(`${fileYml}`, 'utf8') }
isYml = true let envs = [];
} catch (error) { } if (secrets.length > 0) {
try { envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
dockerComposeRaw = await fs.readFile(`${fileYaml}`, 'utf8') }
} catch (error) { }
if (!dockerComposeRaw) { const composeVolumes = [];
throw ('docker-compose.yml or docker-compose.yaml are not found!'); if (volumes.length > 0) {
} for (const volume of volumes) {
const dockerComposeYaml = yaml.load(dockerComposeRaw) let [v, path] = volume.split(':');
if (!dockerComposeYaml.services) { composeVolumes[v] = {
throw 'No Services found in docker-compose file.' name: v
} };
const envs = [ }
`PORT=${port}` }
];
if (secrets.length > 0) { let networks = {};
secrets.forEach((secret) => { for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
if (pullmergeRequestId) { value['container_name'] = `${applicationId}-${key}`;
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret) let environment = typeof value['environment'] === 'undefined' ? [] : value['environment']
if (isSecretFound.length > 0) { value['environment'] = [...environment, ...envs];
envs.push(`${secret.name}=${isSecretFound[0].value}`); value['labels'] = labels;
} else { // TODO: If we support separated volume for each service, we need to add it here
envs.push(`${secret.name}=${secret.value}`); if (value['volumes']?.length > 0) {
} value['volumes'] = value['volumes'].map((volume) => {
} else { let [v, path, permission] = volume.split(':');
if (!secret.isPRMRSecret) { if (!path) {
envs.push(`${secret.name}=${secret.value}`); path = v;
} v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
} } else {
}); v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
} }
await fs.writeFile(`${workdir}/.env`, envs.join('\n')); composeVolumes[v] = {
let envFound = false; name: v
try { };
envFound = !!(await fs.stat(`${workdir}/.env`)); return `${v}:${path}${permission ? ':' + permission : ''}`;
} catch (error) { });
// }
} if (volumes.length > 0) {
const composeVolumes = volumes.map((volume) => { for (const volume of volumes) {
return { value['volumes'].push(volume);
[`${volume.split(':')[0]}`]: { }
name: volume.split(':')[0] }
} if (dockerComposeConfiguration[key].port) {
}; value['expose'] = [dockerComposeConfiguration[key].port];
}); }
let networks = {} if (value['networks']?.length > 0) {
for (let [key, value] of Object.entries(dockerComposeYaml.services)) { value['networks'].forEach((network) => {
value['container_name'] = `${applicationId}-${key}` networks[network] = {
value['env_file'] = envFound ? [`${workdir}/.env`] : [] name: network
value['labels'] = labels };
value['volumes'] = volumes });
if (dockerComposeConfiguration[key].port) { }
value['expose'] = [dockerComposeConfiguration[key].port] value['networks'] = [...(value['networks'] || ''), network];
} dockerComposeYaml.services[key] = {
if (value['networks']?.length > 0) { ...dockerComposeYaml.services[key],
value['networks'].forEach((network) => { restart: defaultComposeConfiguration(network).restart,
networks[network] = { deploy: defaultComposeConfiguration(network).deploy
name: network };
} }
}) if (Object.keys(composeVolumes).length > 0) {
} dockerComposeYaml['volumes'] = { ...composeVolumes };
value['networks'] = [...value['networks'] || '', network] }
dockerComposeYaml.services[key] = { ...dockerComposeYaml.services[key], restart: defaultComposeConfiguration(network).restart, deploy: defaultComposeConfiguration(network).deploy } dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });
}
dockerComposeYaml['volumes'] = Object.assign({ ...dockerComposeYaml['volumes'] }, ...composeVolumes) await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } }) await executeCommand({
await fs.writeFile(`${workdir}/docker-compose.${isYml ? 'yml' : 'yaml'}`, yaml.dump(dockerComposeYaml)); debug,
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} pull` }) buildId,
await saveBuildLog({ line: 'Pulling images from Compose file.', buildId, applicationId }); applicationId,
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} build --progress plain` }) dockerId,
await saveBuildLog({ line: 'Building images from Compose file.', buildId, applicationId }); command: `docker compose --project-directory ${workdir} pull`
});
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
await executeCommand({
debug,
buildId,
applicationId,
dockerId,
command: `docker compose --project-directory ${workdir} build --progress plain`
});
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
} }

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common'; import { buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
@@ -24,21 +25,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (depsFound) { if (depsFound) {

View File

@@ -1,45 +1,27 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common'; import { buildImage } from './common';
export default async function (data) { export default async function (data) {
let { let { workdir, buildId, baseDirectory, secrets, pullmergeRequestId, dockerFileLocation } = data;
applicationId,
debug,
tag,
workdir,
buildId,
baseDirectory,
secrets,
pullmergeRequestId,
dockerFileLocation
} = data
const file = `${workdir}${baseDirectory}${dockerFileLocation}`; const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
data.workdir = `${workdir}${baseDirectory}`; data.workdir = `${workdir}${baseDirectory}`;
const DockerfileRaw = await fs.readFile(`${file}`, 'utf8') const DockerfileRaw = await fs.readFile(`${file}`, 'utf8');
const Dockerfile: Array<string> = DockerfileRaw const Dockerfile: Array<string> = DockerfileRaw.toString().trim().split('\n');
.toString() Dockerfile.forEach((line, index) => {
.trim() if (line.startsWith('FROM')) {
.split('\n'); Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); }
});
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.forEach((line, index) => {
if ( if (line.startsWith('FROM')) {
(pullmergeRequestId && secret.isPRMRSecret) || Dockerfile.splice(index + 1, 0, env);
(!pullmergeRequestId && !secret.isPRMRSecret)
) {
Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
}
});
} }
} });
}); });
} }
await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
await fs.writeFile(`${workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
await buildImage(data); await buildImage(data);
} }

View File

@@ -1,17 +1,16 @@
import { executeDockerCmd, prisma } from "../common" import { executeCommand } from "../common"
import { saveBuildLog } from "./common"; import { saveBuildLog } from "./common";
export default async function (data: any): Promise<void> { export default async function (data: any): Promise<void> {
const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
try { try {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId }); await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
await executeDockerCmd({ await executeCommand({
buildId, buildId,
debug, debug,
dockerId, dockerId,
command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}` command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
}) })
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
} catch (error) { } catch (error) {
throw error; throw error;
} }

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common'; import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
@@ -24,21 +25,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (isPnpm) { if (isPnpm) {

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage, checkPnpm } from './common'; import { buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
@@ -20,21 +21,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (isPnpm) { if (isPnpm) {

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common'; import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
@@ -24,21 +25,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (isPnpm) { if (isPnpm) {

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common'; import { buildImage } from './common';
const createDockerfile = async (data, image, htaccessFound): Promise<void> => { const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
@@ -13,21 +14,8 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
Dockerfile.push(`FROM ${image}`); Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common'; import { buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
@@ -18,21 +19,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (pythonWSGI?.toLowerCase() === 'gunicorn') { if (pythonWSGI?.toLowerCase() === 'gunicorn') {

View File

@@ -1,6 +1,6 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import TOML from '@iarna/toml'; import TOML from '@iarna/toml';
import { asyncExecShell } from '../common'; import { executeCommand } from '../common';
import { buildCacheImageWithCargo, buildImage } from './common'; import { buildCacheImageWithCargo, buildImage } from './common';
const createDockerfile = async (data, image, name): Promise<void> => { const createDockerfile = async (data, image, name): Promise<void> => {
@@ -28,7 +28,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
export default async function (data) { export default async function (data) {
try { try {
const { workdir, baseImage, baseBuildImage } = data; const { workdir, baseImage, baseBuildImage } = data;
const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`); const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
const parsedToml: any = TOML.parse(cargoToml); const parsedToml: any = TOML.parse(cargoToml);
const name = parsedToml.package.name; const name = parsedToml.package.name;
await buildCacheImageWithCargo(data, baseBuildImage); await buildCacheImageWithCargo(data, baseBuildImage);

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage } from './common'; import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
@@ -18,24 +19,15 @@ const createDockerfile = async (data, image): Promise<void> => {
const Dockerfile: Array<string> = []; const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`); Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app'); if (baseImage?.includes('httpd')) {
Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
} else {
Dockerfile.push('WORKDIR /app');
}
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
if (secret.isBuildSecret) { Dockerfile.push(env);
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
}); });
} }
if (buildCommand) { if (buildCommand) {

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
import { executeDockerCmd } from './common'; import { executeCommand } from './common';
export function formatLabelsOnDocker(data) { export function formatLabelsOnDocker(data) {
return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => { return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
@@ -16,7 +16,7 @@ export function formatLabelsOnDocker(data) {
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> { export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
let containerFound = false; let containerFound = false;
try { try {
const { stdout } = await executeDockerCmd({ const { stdout } = await executeCommand({
dockerId, dockerId,
command: command:
`docker inspect --format '{{json .State}}' ${container}` `docker inspect --format '{{json .State}}' ${container}`
@@ -28,27 +28,26 @@ export async function checkContainer({ dockerId, container, remove = false }: {
const isRestarting = status === 'restarting' const isRestarting = status === 'restarting'
const isExited = status === 'exited' const isExited = status === 'exited'
if (status === 'created') { if (status === 'created') {
await executeDockerCmd({ await executeCommand({
dockerId, dockerId,
command: command:
`docker rm ${container}` `docker rm ${container}`
}); });
} }
if (remove && status === 'exited') { if (remove && status === 'exited') {
await executeDockerCmd({ await executeCommand({
dockerId, dockerId,
command: command:
`docker rm ${container}` `docker rm ${container}`
}); });
} }
return { return {
found: containerFound, found: containerFound,
status: { status: {
isRunning, isRunning,
isRestarting, isRestarting,
isExited isExited
} }
}; };
} catch (err) { } catch (err) {
@@ -63,7 +62,7 @@ export async function checkContainer({ dockerId, container, remove = false }: {
export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> { export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
let isExited = false; let isExited = false;
try { try {
const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` }) const { stdout } = await executeCommand({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
if (stdout.trim() === 'exited') { if (stdout.trim() === 'exited') {
isExited = true; isExited = true;
} }
@@ -82,13 +81,13 @@ export async function removeContainer({
dockerId: string; dockerId: string;
}): Promise<void> { }): Promise<void> {
try { try {
const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` }) const { stdout } = await executeCommand({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
if (JSON.parse(stdout).Running) { if (JSON.parse(stdout).Running) {
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` }) await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
await executeDockerCmd({ dockerId, command: `docker rm ${id}` }) await executeCommand({ dockerId, command: `docker rm ${id}` })
} }
if (JSON.parse(stdout).Status === 'exited') { if (JSON.parse(stdout).Status === 'exited') {
await executeDockerCmd({ dockerId, command: `docker rm ${id}` }) await executeCommand({ dockerId, command: `docker rm ${id}` })
} }
} catch (error) { } catch (error) {
throw error; throw error;

View File

@@ -1,7 +1,7 @@
import jsonwebtoken from 'jsonwebtoken'; import jsonwebtoken from 'jsonwebtoken';
import { saveBuildLog } from '../buildPacks/common'; import { saveBuildLog } from '../buildPacks/common';
import { asyncExecShell, decrypt, prisma } from '../common'; import { decrypt, executeCommand, prisma } from '../common';
export default async function ({ export default async function ({
applicationId, applicationId,
@@ -9,6 +9,7 @@ export default async function ({
githubAppId, githubAppId,
repository, repository,
apiUrl, apiUrl,
gitCommitHash,
htmlUrl, htmlUrl,
branch, branch,
buildId, buildId,
@@ -20,6 +21,7 @@ export default async function ({
githubAppId: string; githubAppId: string;
repository: string; repository: string;
apiUrl: string; apiUrl: string;
gitCommitHash?: string;
htmlUrl: string; htmlUrl: string;
branch: string; branch: string;
buildId: string; buildId: string;
@@ -28,16 +30,24 @@ export default async function ({
}): Promise<string> { }): Promise<string> {
const { default: got } = await import('got') const { default: got } = await import('got')
const url = htmlUrl.replace('https://', '').replace('http://', ''); const url = htmlUrl.replace('https://', '').replace('http://', '');
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
if (forPublic) { if (forPublic) {
await saveBuildLog({ await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`, line: `Cloning ${repository}:${branch}...`,
buildId, buildId,
applicationId applicationId
}); });
await asyncExecShell( if (gitCommitHash) {
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. ` await saveBuildLog({
); line: `Checking out ${gitCommitHash} commit...`,
buildId,
applicationId
});
}
await executeCommand({
command:
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
shell: true
});
} else { } else {
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } }); const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
@@ -62,15 +72,23 @@ export default async function ({
}) })
.json(); .json();
await saveBuildLog({ await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`, line: `Cloning ${repository}:${branch}...`,
buildId, buildId,
applicationId applicationId
}); });
await asyncExecShell( if (gitCommitHash) {
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. ` await saveBuildLog({
); line: `Checking out ${gitCommitHash} commit...`,
buildId,
applicationId
});
}
await executeCommand({
command:
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
shell: true
});
} }
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`); const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
return commit.replace('\n', ''); return commit.replace('\n', '');
} }

View File

@@ -1,11 +1,12 @@
import { saveBuildLog } from "../buildPacks/common"; import { saveBuildLog } from "../buildPacks/common";
import { asyncExecShell } from "../common"; import { executeCommand } from "../common";
export default async function ({ export default async function ({
applicationId, applicationId,
workdir, workdir,
repodir, repodir,
htmlUrl, htmlUrl,
gitCommitHash,
repository, repository,
branch, branch,
buildId, buildId,
@@ -20,34 +21,43 @@ export default async function ({
branch: string; branch: string;
buildId: string; buildId: string;
repodir: string; repodir: string;
gitCommitHash: string;
privateSshKey: string; privateSshKey: string;
customPort: number; customPort: number;
forPublic: boolean; forPublic: boolean;
}): Promise<string> { }): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, ''); const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
if (!forPublic) { if (!forPublic) {
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`); await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
await asyncExecShell(`chmod 600 ${repodir}/id.rsa`); await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
} }
await saveBuildLog({ await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`, line: `Cloning ${repository}:${branch}...`,
buildId, buildId,
applicationId applicationId
}); });
if (gitCommitHash) {
await saveBuildLog({
line: `Checking out ${gitCommitHash} commit...`,
buildId,
applicationId
});
}
if (forPublic) { if (forPublic) {
await asyncExecShell( await executeCommand({
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. ` command:
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
}
); );
} else { } else {
await asyncExecShell( await executeCommand({
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. ` command:
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
}
); );
} }
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`); const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
return commit.replace('\n', ''); return commit.replace('\n', '');
} }

View File

@@ -9,7 +9,7 @@ Bree.extend(TSBree);
const options: any = { const options: any = {
defaultExtension: 'js', defaultExtension: 'js',
logger: new Cabin(), logger: false,
// logger: false, // logger: false,
// workerMessageHandler: async ({ name, message }) => { // workerMessageHandler: async ({ name, message }) => {
// if (name === 'deployApplication' && message?.deploying) { // if (name === 'deployApplication' && message?.deploying) {

View File

@@ -1,170 +1,47 @@
import { isDev } from "./common"; import { isARM, isDev } from './common';
import fs from 'fs/promises'; import fs from 'fs/promises';
export async function getTemplates() { export async function getTemplates() {
let templates: any = []; const templatePath = isDev ? './templates.json' : '/app/templates.json';
if (isDev) { const open = await fs.open(templatePath, 'r');
templates = JSON.parse(await (await fs.readFile('./templates.json')).toString()) try {
} else { let data = await open.readFile({ encoding: 'utf-8' });
templates = JSON.parse(await (await fs.readFile('/app/templates.json')).toString()) let jsonData = JSON.parse(data);
} if (isARM(process.arch)) {
// if (!isDev) { jsonData = jsonData.filter((d) => d.arch !== 'amd64');
// templates.push({ }
// "templateVersion": "1.0.0", return jsonData;
// "defaultVersion": "latest", } catch (error) {
// "name": "Test-Fake-Service", return [];
// "description": "", } finally {
// "services": { await open?.close();
// "$$id": { }
// "name": "Test-Fake-Service",
// "depends_on": [
// "$$id-postgresql",
// "$$id-redis"
// ],
// "image": "weblate/weblate:$$core_version",
// "volumes": [
// "$$id-data:/app/data",
// ],
// "environment": [
// `POSTGRES_SECRET=$$secret_postgres_secret`,
// `WEBLATE_SITE_DOMAIN=$$config_weblate_site_domain`,
// `WEBLATE_ADMIN_PASSWORD=$$secret_weblate_admin_password`,
// `POSTGRES_PASSWORD=$$secret_postgres_password`,
// `POSTGRES_USER=$$config_postgres_user`,
// `POSTGRES_DATABASE=$$config_postgres_db`,
// `POSTGRES_HOST=$$id-postgresql`,
// `POSTGRES_PORT=5432`,
// `REDIS_HOST=$$id-redis`,
// ],
// "ports": [
// "8080"
// ]
// },
// "$$id-postgresql": {
// "name": "PostgreSQL",
// "depends_on": [],
// "image": "postgres:14-alpine",
// "volumes": [
// "$$id-postgresql-data:/var/lib/postgresql/data",
// ],
// "environment": [
// "POSTGRES_USER=$$config_postgres_user",
// "POSTGRES_PASSWORD=$$secret_postgres_password",
// "POSTGRES_DB=$$config_postgres_db",
// ],
// "ports": []
// },
// "$$id-redis": {
// "name": "Redis",
// "depends_on": [],
// "image": "redis:7-alpine",
// "volumes": [
// "$$id-redis-data:/data",
// ],
// "environment": [],
// "ports": [],
// }
// },
// "variables": [
// {
// "id": "$$config_weblate_site_domain",
// "main": "$$id",
// "name": "WEBLATE_SITE_DOMAIN",
// "label": "Weblate Domain",
// "defaultValue": "$$generate_domain",
// "description": "",
// },
// {
// "id": "$$secret_weblate_admin_password",
// "main": "$$id",
// "name": "WEBLATE_ADMIN_PASSWORD",
// "label": "Weblate Admin Password",
// "defaultValue": "$$generate_password",
// "description": "",
// "extras": {
// "isVisibleOnUI": true,
// }
// },
// {
// "id": "$$secret_weblate_admin_password2",
// "name": "WEBLATE_ADMIN_PASSWORD2",
// "label": "Weblate Admin Password2",
// "defaultValue": "$$generate_password",
// "description": "",
// },
// {
// "id": "$$config_postgres_user",
// "main": "$$id-postgresql",
// "name": "POSTGRES_USER",
// "label": "PostgreSQL User",
// "defaultValue": "$$generate_username",
// "description": "",
// },
// {
// "id": "$$secret_postgres_password",
// "main": "$$id-postgresql",
// "name": "POSTGRES_PASSWORD",
// "label": "PostgreSQL Password",
// "defaultValue": "$$generate_password(32)",
// "description": "",
// },
// {
// "id": "$$secret_postgres_password_hex32",
// "name": "POSTGRES_PASSWORD_hex32",
// "label": "PostgreSQL Password hex32",
// "defaultValue": "$$generate_hex(32)",
// "description": "",
// },
// {
// "id": "$$config_postgres_something_hex32",
// "name": "POSTGRES_SOMETHING_HEX32",
// "label": "PostgreSQL Something hex32",
// "defaultValue": "$$generate_hex(32)",
// "description": "",
// },
// {
// "id": "$$config_postgres_db",
// "main": "$$id-postgresql",
// "name": "POSTGRES_DB",
// "label": "PostgreSQL Database",
// "defaultValue": "weblate",
// "description": "",
// },
// {
// "id": "$$secret_postgres_secret",
// "name": "POSTGRES_SECRET",
// "label": "PostgreSQL Secret",
// "defaultValue": "",
// "description": "",
// },
// ]
// })
// }
return templates
} }
const compareSemanticVersions = (a: string, b: string) => { const compareSemanticVersions = (a: string, b: string) => {
const a1 = a.split('.'); const a1 = a.split('.');
const b1 = b.split('.'); const b1 = b.split('.');
const len = Math.min(a1.length, b1.length); const len = Math.min(a1.length, b1.length);
for (let i = 0; i < len; i++) { for (let i = 0; i < len; i++) {
const a2 = +a1[i] || 0; const a2 = +a1[i] || 0;
const b2 = +b1[i] || 0; const b2 = +b1[i] || 0;
if (a2 !== b2) { if (a2 !== b2) {
return a2 > b2 ? 1 : -1; return a2 > b2 ? 1 : -1;
} }
} }
return b1.length - a1.length; return b1.length - a1.length;
}; };
export async function getTags(type: string) { export async function getTags(type: string) {
if (type) { try {
let tags: any = []; if (type) {
if (isDev) { const tagsPath = isDev ? './tags.json' : '/app/tags.json';
tags = JSON.parse(await (await fs.readFile('./tags.json')).toString()) const data = await fs.readFile(tagsPath, 'utf8');
} else { let tags = JSON.parse(data);
tags = JSON.parse(await (await fs.readFile('/app/tags.json')).toString()) if (tags) {
} tags = tags.find((tag: any) => tag.name.includes(type));
tags = tags.find((tag: any) => tag.name.includes(type)) tags.tags = tags.tags.sort(compareSemanticVersions).reverse();
tags.tags = tags.tags.sort(compareSemanticVersions).reverse(); return tags;
return tags }
} }
return [] } catch (error) {
return [];
}
} }

View File

@@ -1,5 +1,5 @@
import { prisma } from '../common'; import { decrypt, prisma } from '../common';
export async function removeService({ id }: { id: string }): Promise<void> { export async function removeService({ id }: { id: string }): Promise<void> {
await prisma.serviceSecret.deleteMany({ where: { serviceId: id } }); await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
@@ -22,4 +22,18 @@ export async function removeService({ id }: { id: string }): Promise<void> {
await prisma.taiga.deleteMany({ where: { serviceId: id } }); await prisma.taiga.deleteMany({ where: { serviceId: id } });
await prisma.service.delete({ where: { id } }); await prisma.service.delete({ where: { id } });
}
export async function verifyAndDecryptServiceSecrets(id: string) {
const secrets = await prisma.serviceSecret.findMany({ where: { serviceId: id } })
let decryptedSecrets = secrets.map(secret => {
const { name, value } = secret
if (value) {
let rawValue = decrypt(value)
rawValue = rawValue.replaceAll(/\$/gi, '$$$')
return { name, value: rawValue }
}
return { name, value }
})
return decryptedSecrets
} }

View File

@@ -2,11 +2,12 @@ import type { FastifyReply, FastifyRequest } from 'fastify';
import fs from 'fs/promises'; import fs from 'fs/promises';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import path from 'path'; import path from 'path';
import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeDockerCmd, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common'; import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeCommand, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers'; import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers';
import { ServiceStartStop } from '../../routes/api/v1/services/types'; import { ServiceStartStop } from '../../routes/api/v1/services/types';
import { OnlyId } from '../../types'; import { OnlyId } from '../../types';
import { verifyAndDecryptServiceSecrets } from './common';
export async function stopService(request: FastifyRequest<ServiceStartStop>) { export async function stopService(request: FastifyRequest<ServiceStartStop>) {
try { try {
@@ -14,14 +15,19 @@ export async function stopService(request: FastifyRequest<ServiceStartStop>) {
const teamId = request.user.teamId; const teamId = request.user.teamId;
const { destinationDockerId } = await getServiceFromDB({ id, teamId }); const { destinationDockerId } = await getServiceFromDB({ id, teamId });
if (destinationDockerId) { if (destinationDockerId) {
await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId, dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0` command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}`
})
await executeDockerCmd({
dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
}) })
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
}
}
}
return {} return {}
} }
throw { status: 500, message: 'Could not stop containers.' } throw { status: 500, message: 'Could not stop containers.' }
@@ -34,7 +40,7 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
const { id } = request.params; const { id } = request.params;
const teamId = request.user.teamId; const teamId = request.user.teamId;
const service = await getServiceFromDB({ id, teamId }); const service = await getServiceFromDB({ id, teamId });
const arm = isARM(service.arch) const arm = isARM(service.arch);
const { type, destinationDockerId, destinationDocker, persistentStorage, exposePort } = const { type, destinationDockerId, destinationDocker, persistentStorage, exposePort } =
service; service;
@@ -65,15 +71,17 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
} }
} }
} }
const secrets = await verifyAndDecryptServiceSecrets(id)
const secrets = await prisma.serviceSecret.findMany({ where: { serviceId: id } })
for (const secret of secrets) { for (const secret of secrets) {
const { name, value } = secret const { name, value } = secret
if (value) { if (value) {
const foundEnv = !!template.services[s].environment?.find(env => env.startsWith(`${name}=`)) const foundEnv = !!template.services[s].environment?.find(env => env.startsWith(`${name}=`))
const foundNewEnv = !!newEnvironments?.find(env => env.startsWith(`${name}=`)) const foundNewEnv = !!newEnvironments?.find(env => env.startsWith(`${name}=`))
if (foundEnv && !foundNewEnv) { if (foundEnv && !foundNewEnv) {
newEnvironments.push(`${name}=${decrypt(value)}`) newEnvironments.push(`${name}=${value}`)
}
if (!foundEnv && !foundNewEnv && s === id) {
newEnvironments.push(`${name}=${value}`)
} }
} }
} }
@@ -103,15 +111,34 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
} }
} }
} }
let ports = []
if (template.services[s].proxy?.length > 0) {
for (const proxy of template.services[s].proxy) {
if (proxy.hostPort) {
ports.push(`${proxy.hostPort}:${proxy.port}`)
}
}
} else {
if (template.services[s].ports?.length === 1) {
for (const port of template.services[s].ports) {
if (exposePort) {
ports.push(`${exposePort}:${port}`)
}
}
}
}
let image = template.services[s].image
if (arm && template.services[s].imageArm) {
image = template.services[s].imageArm
}
config[s] = { config[s] = {
container_name: s, container_name: s,
build: template.services[s].build || undefined, build: template.services[s].build || undefined,
command: template.services[s].command, command: template.services[s].command,
entrypoint: template.services[s]?.entrypoint, entrypoint: template.services[s]?.entrypoint,
image: arm ? template.services[s].imageArm : template.services[s].image, image,
expose: template.services[s].ports, expose: template.services[s].ports,
...(exposePort ? { ports: [`${exposePort}:${exposePort}`] } : {}), ports: ports.length > 0 ? ports : undefined,
volumes: Array.from(volumes), volumes: Array.from(volumes),
environment: newEnvironments, environment: newEnvironments,
depends_on: template.services[s]?.depends_on, depends_on: template.services[s]?.depends_on,
@@ -121,7 +148,6 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
labels: makeLabelForServices(type), labels: makeLabelForServices(type),
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
} }
// Generate files for builds // Generate files for builds
if (template.services[s]?.files?.length > 0) { if (template.services[s]?.files?.length > 0) {
if (!config[s].build) { if (!config[s].build) {
@@ -161,21 +187,37 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
// Workaround: Stop old minio proxies // Workaround: Stop old minio proxies
if (service.type === 'minio') { if (service.type === 'minio') {
try { try {
await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: command:
`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container stop -t 0` `docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
}); });
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
}
}
}
} catch (error) { } } catch (error) { }
try { try {
await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: command:
`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container rm -f` `docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
}); });
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
}
}
}
} catch (error) { } } catch (error) { }
} }
return {} return {}
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -185,16 +227,16 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) { async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) {
try { try {
fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Pulling images...' }) fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Pulling images...' })
await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} pull` }) await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
} catch (error) { } } catch (error) { }
fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Building images...' }) fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Building images...' })
await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` }) await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Creating containers...' }) fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Creating containers...' })
await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} create` }) await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Starting containers...' }) fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Starting containers...' })
await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} start` }) await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
await asyncSleep(1000); await asyncSleep(1000);
await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` }) await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 0 }) fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 0 })
} }
export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) { export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
@@ -206,7 +248,7 @@ export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply:
destinationDocker, destinationDocker,
} = await getServiceFromDB({ id, teamId }); } = await getServiceFromDB({ id, teamId });
if (destinationDockerId) { if (destinationDockerId) {
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker exec ${id} migrate` command: `docker exec ${id} migrate`
}) })

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +1,8 @@
import { FastifyPluginAsync } from 'fastify'; import { FastifyPluginAsync } from 'fastify';
import { OnlyId } from '../../../../types'; import { OnlyId } from '../../../../types';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers'; import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getDockerImages, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRegistry, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types'; import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartApplication, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => { const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => { fastify.addHook('onRequest', async (request) => {
@@ -21,7 +21,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request)); fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request));
fastify.post<OnlyId>('/:id/restart', async (request, reply) => await restartApplication(request, reply)); fastify.post<RestartApplication>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply)); fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply)); fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply));
@@ -45,7 +45,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request)); fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply)); fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));
// fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request)); fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request));
fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request)); fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request)); fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
@@ -53,6 +52,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/:id/usage', async (request) => await getUsage(request)) fastify.get('/:id/usage', async (request) => await getUsage(request))
fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request)) fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request))
fastify.get('/:id/images', async (request) => await getDockerImages(request))
fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request)) fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply)); fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));
@@ -64,6 +65,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request)); fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply)); fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));
fastify.post('/:id/configuration/registry', async (request, reply) => await saveRegistry(request, reply));
fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply)); fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply));
fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request)); fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));

View File

@@ -19,12 +19,15 @@ export interface SaveApplication extends OnlyId {
denoMainFile: string, denoMainFile: string,
denoOptions: string, denoOptions: string,
baseImage: string, baseImage: string,
gitCommitHash: string,
baseBuildImage: string, baseBuildImage: string,
deploymentType: string, deploymentType: string,
baseDatabaseBranch: string, baseDatabaseBranch: string,
dockerComposeFile: string, dockerComposeFile: string,
dockerComposeFileLocation: string, dockerComposeFileLocation: string,
dockerComposeConfiguration: string dockerComposeConfiguration: string,
simpleDockerfile: string,
dockerRegistryImageName: string
} }
} }
export interface SaveApplicationSettings extends OnlyId { export interface SaveApplicationSettings extends OnlyId {
@@ -55,7 +58,7 @@ export interface GetImages {
Body: { buildPack: string, deploymentType: string } Body: { buildPack: string, deploymentType: string }
} }
export interface SaveApplicationSource extends OnlyId { export interface SaveApplicationSource extends OnlyId {
Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string } Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string, simpleDockerfile?: string }
} }
export interface CheckRepository extends OnlyId { export interface CheckRepository extends OnlyId {
Querystring: { repository: string, branch: string } Querystring: { repository: string, branch: string }
@@ -140,4 +143,12 @@ export interface RestartPreviewApplication {
id: string, id: string,
pullmergeRequestId: string | null, pullmergeRequestId: string | null,
} }
}
export interface RestartApplication {
Params: {
id: string,
},
Body: {
imageId: string | null,
}
} }

View File

@@ -1,24 +1,31 @@
import { FastifyPluginAsync } from 'fastify'; import { FastifyPluginAsync } from 'fastify';
import { errorHandler, listSettings, version } from '../../../../lib/common'; import { errorHandler, isARM, listSettings, version } from '../../../../lib/common';
const root: FastifyPluginAsync = async (fastify): Promise<void> => { const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async (request) => { fastify.addHook('onRequest', async (request) => {
const teamId = request.user?.teamId; try {
const settings = await listSettings() await request.jwtVerify();
try { } catch (error) {
return { return;
ipv4: teamId ? settings.ipv4 : 'nope', }
ipv6: teamId ? settings.ipv6 : 'nope', });
version, fastify.get('/', async (request) => {
whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true', const teamId = request.user?.teamId;
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON, const settings = await listSettings();
isRegistrationEnabled: settings.isRegistrationEnabled, try {
} return {
} catch ({ status, message }) { ipv4: teamId ? settings.ipv4 : null,
return errorHandler({ status, message }) ipv6: teamId ? settings.ipv6 : null,
} version,
}); whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
isRegistrationEnabled: settings.isRegistrationEnabled,
isARM: isARM(process.arch)
};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
});
}; };
export default root; export default root;

View File

@@ -3,7 +3,7 @@ import type { FastifyRequest } from 'fastify';
import { FastifyReply } from 'fastify'; import { FastifyReply } from 'fastify';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import fs from 'fs/promises'; import fs from 'fs/promises';
import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common'; import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeCommand, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
import { day } from '../../../../lib/dayjs'; import { day } from '../../../../lib/dayjs';
import type { OnlyId } from '../../../../types'; import type { OnlyId } from '../../../../types';
@@ -89,7 +89,7 @@ export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
const { destinationDockerId, destinationDocker } = database; const { destinationDockerId, destinationDocker } = database;
if (destinationDockerId) { if (destinationDockerId) {
try { try {
const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` }) const { stdout } = await executeCommand({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
if (JSON.parse(stdout).Running) { if (JSON.parse(stdout).Running) {
isRunning = true; isRunning = true;
@@ -208,7 +208,7 @@ export async function saveDatabaseDestination(request: FastifyRequest<SaveDataba
if (destinationDockerId) { if (destinationDockerId) {
if (type && version) { if (type && version) {
const baseImage = getDatabaseImage(type, arch); const baseImage = getDatabaseImage(type, arch);
executeDockerCmd({ dockerId, command: `docker pull ${baseImage}:${version}` }) executeCommand({ dockerId, command: `docker pull ${baseImage}:${version}` })
} }
} }
return reply.code(201).send({}) return reply.code(201).send({})
@@ -298,7 +298,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
}; };
const composeFileDestination = `${workdir}/docker-compose.yaml`; const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` }) await executeCommand({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort); if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
return {}; return {};
@@ -347,7 +347,7 @@ export async function getDatabaseLogs(request: FastifyRequest<GetDatabaseLogs>)
// const found = await checkContainer({ dockerId, container: id }) // const found = await checkContainer({ dockerId, container: id })
// if (found) { // if (found) {
const { default: ansi } = await import('strip-ansi') const { default: ansi } = await import('strip-ansi')
const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` }) const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` })
const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a); const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a); const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
const logs = stripLogsStderr.concat(stripLogsStdout) const logs = stripLogsStderr.concat(stripLogsStdout)

View File

@@ -4,7 +4,7 @@ import sshConfig from 'ssh-config'
import fs from 'fs/promises' import fs from 'fs/promises'
import os from 'os'; import os from 'os';
import { asyncExecShell, createRemoteEngineConfiguration, decrypt, errorHandler, executeDockerCmd, executeSSHCmd, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common'; import { createRemoteEngineConfiguration, decrypt, errorHandler, executeCommand, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
import { checkContainer } from '../../../../lib/docker'; import { checkContainer } from '../../../../lib/docker';
import type { OnlyId } from '../../../../types'; import type { OnlyId } from '../../../../types';
@@ -79,9 +79,9 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
if (id === 'new') { if (id === 'new') {
if (engine) { if (engine) {
const { stdout } = await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network ls --filter 'name=^${network}$' --format '{{json .}}'`); const { stdout } = await await executeCommand({ command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'` });
if (stdout === '') { if (stdout === '') {
await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network create --attachable ${network}`); await await executeCommand({ command: `docker network create --attachable ${network}` });
} }
await prisma.destinationDocker.create({ await prisma.destinationDocker.create({
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed } data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
@@ -103,7 +103,7 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
return reply.code(201).send({ id: destination.id }); return reply.code(201).send({ id: destination.id });
} else { } else {
const destination = await prisma.destinationDocker.create({ const destination = await prisma.destinationDocker.create({
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort } data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort: Number(remotePort) }
}); });
return reply.code(201).send({ id: destination.id }) return reply.code(201).send({ id: destination.id })
} }
@@ -122,13 +122,13 @@ export async function deleteDestination(request: FastifyRequest<OnlyId>) {
const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } }); const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
if (isCoolifyProxyUsed) { if (isCoolifyProxyUsed) {
if (engine || remoteVerified) { if (engine || remoteVerified) {
const { stdout: found } = await executeDockerCmd({ const { stdout: found } = await executeCommand({
dockerId: id, dockerId: id,
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'` command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
}) })
if (found) { if (found) {
await executeDockerCmd({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` }) await executeCommand({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
await executeDockerCmd({ dockerId: id, command: `docker network rm ${network}` }) await executeCommand({ dockerId: id, command: `docker network rm ${network}` })
} }
} }
} }
@@ -203,22 +203,31 @@ export async function assignSSHKey(request: FastifyRequest) {
} }
} }
export async function verifyRemoteDockerEngineFn(id: string) { export async function verifyRemoteDockerEngineFn(id: string) {
await createRemoteEngineConfiguration(id);
const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } }) const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
const host = `ssh://${remoteIpAddress}-remote` const daemonJson = `daemon-${id}.json`
const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
if (!stdout) {
await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
}
const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
if (!coolifyNetwork) {
await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
}
if (isCoolifyProxyUsed) await startTraefikProxy(id);
try { try {
const { stdout: daemonJson } = await executeSSHCmd({ dockerId: id, command: `cat /etc/docker/daemon.json` }); await executeCommand({ sshCommand: true, command: `docker network inspect ${network}`, dockerId: id });
let daemonJsonParsed = JSON.parse(daemonJson); } catch (error) {
let isUpdated = false; await executeCommand({ command: `docker network create --attachable ${network}`, dockerId: id });
}
try {
await executeCommand({ sshCommand: true, command: `docker network inspect coolify-infra`, dockerId: id });
} catch (error) {
await executeCommand({ command: `docker network create --attachable coolify-infra`, dockerId: id });
}
if (isCoolifyProxyUsed) await startTraefikProxy(id);
let isUpdated = false;
let daemonJsonParsed = {
"live-restore": true,
"features": {
"buildkit": true
}
};
try {
const { stdout: daemonJson } = await executeCommand({ sshCommand: true, dockerId: id, command: `cat /etc/docker/daemon.json` });
daemonJsonParsed = JSON.parse(daemonJson);
if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) { if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
isUpdated = true; isUpdated = true;
daemonJsonParsed['live-restore'] = true daemonJsonParsed['live-restore'] = true
@@ -230,21 +239,19 @@ export async function verifyRemoteDockerEngineFn(id: string) {
buildkit: true buildkit: true
} }
} }
if (isUpdated) {
await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
}
} catch (error) { } catch (error) {
const daemonJsonParsed = { isUpdated = true;
"live-restore": true, }
"features": { try {
"buildkit": true if (isUpdated) {
} await executeCommand({ shell: true, command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}` })
await executeCommand({ dockerId: id, command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json` });
await executeCommand({ command: `rm /tmp/${daemonJson}` })
await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
} }
await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
} finally {
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } }) await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
} catch (error) {
throw new Error('Error while verifying remote docker engine')
} }
} }
export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) { export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {

View File

@@ -4,7 +4,6 @@ import bcrypt from "bcryptjs";
import fs from 'fs/promises'; import fs from 'fs/promises';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import { import {
asyncExecShell,
asyncSleep, asyncSleep,
cleanupDockerStorage, cleanupDockerStorage,
errorHandler, errorHandler,
@@ -13,6 +12,8 @@ import {
prisma, prisma,
uniqueName, uniqueName,
version, version,
sentryDSN,
executeCommand,
} from "../../../lib/common"; } from "../../../lib/common";
import { scheduler } from "../../../lib/scheduler"; import { scheduler } from "../../../lib/scheduler";
import type { FastifyReply, FastifyRequest } from "fastify"; import type { FastifyReply, FastifyRequest } from "fastify";
@@ -24,6 +25,35 @@ export async function hashPassword(password: string): Promise<string> {
return bcrypt.hash(password, saltRounds); return bcrypt.hash(password, saltRounds);
} }
export async function backup(request: FastifyRequest) {
try {
const { backupData } = request.params;
let std = null;
const [id, backupType, type, zipped, storage] = backupData.split(':')
console.log(id, backupType, type, zipped, storage)
const database = await prisma.database.findUnique({ where: { id } })
if (database) {
// await executeDockerCmd({
// dockerId: database.destinationDockerId,
// command: `docker pull coollabsio/backup:latest`,
// })
std = await executeCommand({
dockerId: database.destinationDockerId,
command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
})
}
if (std.stdout) {
return std.stdout;
}
if (std.stderr) {
return std.stderr;
}
return 'nope';
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function cleanupManually(request: FastifyRequest) { export async function cleanupManually(request: FastifyRequest) {
try { try {
const { serverId } = request.body; const { serverId } = request.body;
@@ -110,14 +140,10 @@ export async function update(request: FastifyRequest<Update>) {
try { try {
if (!isDev) { if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst(); const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`); await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
await asyncExecShell(`env | grep COOLIFY > .env`); await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
await asyncExecShell( await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` });
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` });
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
);
return {}; return {};
} else { } else {
await asyncSleep(2000); await asyncSleep(2000);
@@ -146,7 +172,7 @@ export async function restartCoolify(request: FastifyRequest<any>) {
const teamId = request.user.teamId; const teamId = request.user.teamId;
if (teamId === "0") { if (teamId === "0") {
if (!isDev) { if (!isDev) {
asyncExecShell(`docker restart coolify`); await executeCommand({ command: `docker restart coolify` });
return {}; return {};
} else { } else {
return {}; return {};
@@ -189,7 +215,7 @@ export async function showDashboard(request: FastifyRequest) {
let foundUnconfiguredApplication = false; let foundUnconfiguredApplication = false;
for (const application of applications) { for (const application of applications) {
if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") { if (((!application.buildPack || !application.branch) && !application.simpleDockerfile) || !application.destinationDockerId || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
foundUnconfiguredApplication = true foundUnconfiguredApplication = true
} }
} }
@@ -398,7 +424,8 @@ export async function getCurrentUser(
} }
const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } }) const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } })
return { return {
settings: await prisma.setting.findFirst(), settings: await prisma.setting.findUnique({ where: { id: "0" } }),
sentryDSN,
pendingInvitations, pendingInvitations,
token, token,
...request.user, ...request.user,

View File

@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify'; import { FastifyPluginAsync } from 'fastify';
import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers'; import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify, backup } from './handlers';
import { GetCurrentUser } from './types'; import { GetCurrentUser } from './types';
export interface Update { export interface Update {
@@ -52,6 +52,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.post('/internal/cleanup', { fastify.post('/internal/cleanup', {
onRequest: [fastify.authenticate] onRequest: [fastify.authenticate]
}, async (request) => await cleanupManually(request)); }, async (request) => await cleanupManually(request));
// fastify.get('/internal/backup/:backupData', {
// onRequest: [fastify.authenticate]
// }, async (request) => await backup(request));
}; };
export default root; export default root;

View File

@@ -1,5 +1,5 @@
import type { FastifyRequest } from 'fastify'; import type { FastifyRequest } from 'fastify';
import { errorHandler, executeDockerCmd, prisma, createRemoteEngineConfiguration, executeSSHCmd } from '../../../../lib/common'; import { errorHandler, prisma, executeCommand } from '../../../../lib/common';
import os from 'node:os'; import os from 'node:os';
import osu from 'node-os-utils'; import osu from 'node-os-utils';
@@ -71,10 +71,10 @@ export async function showUsage(request: FastifyRequest) {
let { remoteEngine } = request.query let { remoteEngine } = request.query
remoteEngine = remoteEngine === 'true' ? true : false remoteEngine = remoteEngine === 'true' ? true : false
if (remoteEngine) { if (remoteEngine) {
const { stdout: stats } = await executeSSHCmd({ dockerId: id, command: `vmstat -s` }) const { stdout: stats } = await executeCommand({ sshCommand: true, dockerId: id, command: `vmstat -s` })
const { stdout: disks } = await executeSSHCmd({ dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` }) const { stdout: disks } = await executeCommand({ sshCommand: true, shell: true, dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
const { stdout: cpus } = await executeSSHCmd({ dockerId: id, command: `nproc --all` }) const { stdout: cpus } = await executeCommand({ sshCommand: true, dockerId: id, command: `nproc --all` })
const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` }) const { stdout: cpuUsage } = await executeCommand({ sshCommand: true, shell: true, dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
const parsed: any = parseFromText(stats) const parsed: any = parseFromText(stats)
return { return {
usage: { usage: {

View File

@@ -4,7 +4,7 @@ import yaml from 'js-yaml';
import bcrypt from 'bcryptjs'; import bcrypt from 'bcryptjs';
import cuid from 'cuid'; import cuid from 'cuid';
import { prisma, uniqueName, asyncExecShell, getServiceFromDB, getContainerUsage, isDomainConfigured, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, checkExposedPort, listSettings } from '../../../../lib/common'; import { prisma, uniqueName, getServiceFromDB, getContainerUsage, isDomainConfigured, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, checkDomainsIsValidInDNS, checkExposedPort, listSettings, generateToken, executeCommand } from '../../../../lib/common';
import { day } from '../../../../lib/dayjs'; import { day } from '../../../../lib/dayjs';
import { checkContainer, } from '../../../../lib/docker'; import { checkContainer, } from '../../../../lib/docker';
import { removeService } from '../../../../lib/services/common'; import { removeService } from '../../../../lib/services/common';
@@ -48,14 +48,19 @@ export async function cleanupUnconfiguredServices(request: FastifyRequest) {
for (const service of services) { for (const service of services) {
if (!service.fqdn) { if (!service.fqdn) {
if (service.destinationDockerId) { if (service.destinationDockerId) {
await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: service.destinationDockerId, dockerId: service.destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0` command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}`
})
await executeDockerCmd({
dockerId: service.destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
}) })
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({ dockerId: service.destinationDockerId, command: `docker stop -t 0 ${container}` })
await executeCommand({ dockerId: service.destinationDockerId, command: `docker rm --force ${container}` })
}
}
}
} }
await removeService({ id: service.id }); await removeService({ id: service.id });
} }
@@ -73,55 +78,61 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
const { destinationDockerId, settings } = service; const { destinationDockerId, settings } = service;
let payload = {} let payload = {}
if (destinationDockerId) { if (destinationDockerId) {
const { stdout: containers } = await executeDockerCmd({ const { stdout: containers } = await executeCommand({
dockerId: service.destinationDocker.id, dockerId: service.destinationDocker.id,
command: command:
`docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'` `docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'`
}); });
const containersArray = containers.trim().split('\n'); if (containers) {
if (containersArray.length > 0 && containersArray[0] !== '') { const containersArray = containers.trim().split('\n');
const templates = await getTemplates(); if (containersArray.length > 0 && containersArray[0] !== '') {
let template = templates.find(t => t.type === service.type); const templates = await getTemplates();
template = JSON.parse(JSON.stringify(template).replaceAll('$$id', service.id)); let template = templates.find(t => t.type === service.type);
for (const container of containersArray) { const templateStr = JSON.stringify(template)
let isRunning = false; if (templateStr) {
let isExited = false; template = JSON.parse(templateStr.replaceAll('$$id', service.id));
let isRestarting = false; }
let isExcluded = false; for (const container of containersArray) {
const containerObj = JSON.parse(container); let isRunning = false;
const exclude = template.services[containerObj.Names]?.exclude; let isExited = false;
if (exclude) { let isRestarting = false;
let isExcluded = false;
const containerObj = JSON.parse(container);
const exclude = template?.services[containerObj.Names]?.exclude;
if (exclude) {
payload[containerObj.Names] = {
status: {
isExcluded: true,
isRunning: false,
isExited: false,
isRestarting: false,
}
}
continue;
}
const status = containerObj.State
if (status === 'running') {
isRunning = true;
}
if (status === 'exited') {
isExited = true;
}
if (status === 'restarting') {
isRestarting = true;
}
payload[containerObj.Names] = { payload[containerObj.Names] = {
status: { status: {
isExcluded: true, isExcluded,
isRunning: false, isRunning,
isExited: false, isExited,
isRestarting: false, isRestarting
} }
} }
continue;
}
const status = containerObj.State
if (status === 'running') {
isRunning = true;
}
if (status === 'exited') {
isExited = true;
}
if (status === 'restarting') {
isRestarting = true;
}
payload[containerObj.Names] = {
status: {
isExcluded,
isRunning,
isExited,
isRestarting
}
} }
} }
} }
} }
return payload return payload
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -149,18 +160,24 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
} }
} }
parsedTemplate[realKey] = { parsedTemplate[realKey] = {
value,
name, name,
documentation: value.documentation || foundTemplate.documentation || 'https://docs.coollabs.io', documentation: value.documentation || foundTemplate.documentation || 'https://docs.coollabs.io',
image: value.image, image: value.image,
files: value?.files,
environment: [], environment: [],
fqdns: [], fqdns: [],
hostPorts: [],
proxy: {} proxy: {}
} }
if (value.environment?.length > 0) { if (value.environment?.length > 0) {
for (const env of value.environment) { for (const env of value.environment) {
let [envKey, ...envValue] = env.split('=') let [envKey, ...envValue] = env.split('=')
envValue = envValue.join("=") envValue = envValue.join("=")
const variable = foundTemplate.variables.find(v => v.name === envKey) || foundTemplate.variables.find(v => v.id === envValue) let variable = null
if (foundTemplate?.variables) {
variable = foundTemplate?.variables.find(v => v.name === envKey) || foundTemplate?.variables.find(v => v.id === envValue)
}
if (variable) { if (variable) {
const id = variable.id.replaceAll('$$', '') const id = variable.id.replaceAll('$$', '')
const label = variable?.label const label = variable?.label
@@ -186,15 +203,24 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
if (value?.proxy && value.proxy.length > 0) { if (value?.proxy && value.proxy.length > 0) {
for (const proxyValue of value.proxy) { for (const proxyValue of value.proxy) {
if (proxyValue.domain) { if (proxyValue.domain) {
const variable = foundTemplate.variables.find(v => v.id === proxyValue.domain) const variable = foundTemplate?.variables.find(v => v.id === proxyValue.domain)
if (variable) { if (variable) {
const { id, name, label, description, defaultValue, required = false } = variable const { id, name, label, description, defaultValue, required = false } = variable
const found = await prisma.serviceSetting.findFirst({ where: { serviceId: service.id , variableName: proxyValue.domain } }) const found = await prisma.serviceSetting.findFirst({ where: { serviceId: service.id, variableName: proxyValue.domain } })
parsedTemplate[realKey].fqdns.push( parsedTemplate[realKey].fqdns.push(
{ id, name, value: found?.value || '', label, description, defaultValue, required } { id, name, value: found?.value || '', label, description, defaultValue, required }
) )
} }
}
if (proxyValue.hostPort) {
const variable = foundTemplate?.variables.find(v => v.id === proxyValue.hostPort)
if (variable) {
const { id, name, label, description, defaultValue, required = false } = variable
const found = await prisma.serviceSetting.findFirst({ where: { serviceId: service.id, variableName: proxyValue.hostPort } })
parsedTemplate[realKey].hostPorts.push(
{ id, name, value: found?.value || '', label, description, defaultValue, required }
)
}
} }
} }
} }
@@ -208,7 +234,7 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
strParsedTemplate = strParsedTemplate.replaceAll('$$id', service.id) strParsedTemplate = strParsedTemplate.replaceAll('$$id', service.id)
strParsedTemplate = strParsedTemplate.replaceAll('$$core_version', service.version || foundTemplate.defaultVersion) strParsedTemplate = strParsedTemplate.replaceAll('$$core_version', service.version || foundTemplate.defaultVersion)
// replace $$fqdn // replace $$workdir
if (workdir) { if (workdir) {
strParsedTemplate = strParsedTemplate.replaceAll('$$workdir', workdir) strParsedTemplate = strParsedTemplate.replaceAll('$$workdir', workdir)
} }
@@ -217,15 +243,17 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
if (service.serviceSetting.length > 0) { if (service.serviceSetting.length > 0) {
for (const setting of service.serviceSetting) { for (const setting of service.serviceSetting) {
const { value, variableName } = setting const { value, variableName } = setting
const regex = new RegExp(`\\$\\$config_${variableName.replace('$$config_', '')}\\"`, 'gi') const regex = new RegExp(`\\$\\$config_${variableName.replace('$$config_', '')}\"`, 'gi')
if (value === '$$generate_fqdn') { if (value === '$$generate_fqdn') {
strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + "\"" || '' + "\"") strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '"' || '' + '"')
} else if (value === '$$generate_fqdn_slash') {
strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '/' + '"')
} else if (value === '$$generate_domain') { } else if (value === '$$generate_domain') {
strParsedTemplate = strParsedTemplate.replaceAll(regex, getDomain(service.fqdn) + "\"") strParsedTemplate = strParsedTemplate.replaceAll(regex, getDomain(service.fqdn) + '"')
} else if (service.destinationDocker?.network && value === '$$generate_network') { } else if (service.destinationDocker?.network && value === '$$generate_network') {
strParsedTemplate = strParsedTemplate.replaceAll(regex, service.destinationDocker.network + "\"") strParsedTemplate = strParsedTemplate.replaceAll(regex, service.destinationDocker.network + '"')
} else { } else {
strParsedTemplate = strParsedTemplate.replaceAll(regex, value + "\"") strParsedTemplate = strParsedTemplate.replaceAll(regex, value + '"')
} }
} }
} }
@@ -233,15 +261,16 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
// replace $$secret // replace $$secret
if (service.serviceSecret.length > 0) { if (service.serviceSecret.length > 0) {
for (const secret of service.serviceSecret) { for (const secret of service.serviceSecret) {
const { name, value } = secret let { name, value } = secret
const regexHashed = new RegExp(`\\$\\$hashed\\$\\$secret_${name}\\"`, 'gi') name = name.toLowerCase()
const regex = new RegExp(`\\$\\$secret_${name}\\"`, 'gi') const regexHashed = new RegExp(`\\$\\$hashed\\$\\$secret_${name}\"`, 'gi')
const regex = new RegExp(`\\$\\$secret_${name}\"`, 'gi')
if (value) { if (value) {
strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, bcrypt.hashSync(value.replaceAll("\"", "\\\""), 10) + "\"") strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, bcrypt.hashSync(value.replaceAll("\"", "\\\""), 10) + '"')
strParsedTemplate = strParsedTemplate.replaceAll(regex, value.replaceAll("\"", "\\\"") + "\"") strParsedTemplate = strParsedTemplate.replaceAll(regex, value.replaceAll("\"", "\\\"") + '"')
} else { } else {
strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, "\"") strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, '' + '"')
strParsedTemplate = strParsedTemplate.replaceAll(regex, "\"") strParsedTemplate = strParsedTemplate.replaceAll(regex, '' + '"')
} }
} }
} }
@@ -291,42 +320,46 @@ export async function saveServiceType(request: FastifyRequest<SaveServiceType>,
let foundTemplate = templates.find(t => fixType(t.type) === fixType(type)) let foundTemplate = templates.find(t => fixType(t.type) === fixType(type))
if (foundTemplate) { if (foundTemplate) {
foundTemplate = JSON.parse(JSON.stringify(foundTemplate).replaceAll('$$id', id)) foundTemplate = JSON.parse(JSON.stringify(foundTemplate).replaceAll('$$id', id))
if (foundTemplate.variables.length > 0) { if (foundTemplate.variables) {
if (foundTemplate.variables.length > 0) {
for (const variable of foundTemplate.variables) {
const { defaultValue } = variable;
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec(defaultValue)?.[1]) || undefined
if (variable.defaultValue.startsWith('$$generate_password')) {
variable.value = generatePassword({ length });
} else if (variable.defaultValue.startsWith('$$generate_hex')) {
variable.value = generatePassword({ length, isHex: true });
} else if (variable.defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else if (variable.defaultValue.startsWith('$$generate_token')) {
variable.value = generateToken()
} else {
variable.value = variable.defaultValue || '';
}
const foundVariableSomewhereElse = foundTemplate.variables.find(v => v.defaultValue.includes(variable.id))
if (foundVariableSomewhereElse) {
foundVariableSomewhereElse.value = foundVariableSomewhereElse.value.replaceAll(variable.id, variable.value)
}
}
}
for (const variable of foundTemplate.variables) { for (const variable of foundTemplate.variables) {
const { defaultValue } = variable; if (variable.id.startsWith('$$secret_')) {
const regex = /^\$\$.*\((\d+)\)$/g; const found = await prisma.serviceSecret.findFirst({ where: { name: variable.name, serviceId: id } })
const length = Number(regex.exec(defaultValue)?.[1]) || undefined if (!found) {
if (variable.defaultValue.startsWith('$$generate_password')) { await prisma.serviceSecret.create({
variable.value = generatePassword({ length }); data: { name: variable.name, value: encrypt(variable.value) || '', service: { connect: { id } } }
} else if (variable.defaultValue.startsWith('$$generate_hex')) { })
variable.value = generatePassword({ length, isHex: true }); }
} else if (variable.defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else {
variable.value = variable.defaultValue || '';
}
const foundVariableSomewhereElse = foundTemplate.variables.find(v => v.defaultValue.includes(variable.id))
if (foundVariableSomewhereElse) {
foundVariableSomewhereElse.value = foundVariableSomewhereElse.value.replaceAll(variable.id, variable.value)
}
}
}
for (const variable of foundTemplate.variables) {
if (variable.id.startsWith('$$secret_')) {
const found = await prisma.serviceSecret.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) {
await prisma.serviceSecret.create({
data: { name: variable.name, value: encrypt(variable.value) || '', service: { connect: { id } } }
})
}
} }
if (variable.id.startsWith('$$config_')) { if (variable.id.startsWith('$$config_')) {
const found = await prisma.serviceSetting.findFirst({ where: { name: variable.name, serviceId: id } }) const found = await prisma.serviceSetting.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) { if (!found) {
await prisma.serviceSetting.create({ await prisma.serviceSetting.create({
data: { name: variable.name, value: variable.value.toString(), variableName: variable.id, service: { connect: { id } } } data: { name: variable.name, value: variable.value.toString(), variableName: variable.id, service: { connect: { id } } }
}) })
}
} }
} }
} }
@@ -418,7 +451,7 @@ export async function getServiceLogs(request: FastifyRequest<GetServiceLogs>) {
if (destinationDockerId) { if (destinationDockerId) {
try { try {
const { default: ansi } = await import('strip-ansi') const { default: ansi } = await import('strip-ansi')
const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` }) const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a); const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a); const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
const logs = stripLogsStderr.concat(stripLogsStdout) const logs = stripLogsStderr.concat(stripLogsStdout)
@@ -532,7 +565,7 @@ export async function saveService(request: FastifyRequest<SaveService>, reply: F
} }
if (isNew) { if (isNew) {
if (!variableName) { if (!variableName) {
variableName = foundTemplate.variables.find(v => v.name === name).id variableName = foundTemplate?.variables.find(v => v.name === name).id
} }
await prisma.serviceSetting.create({ data: { name, value, variableName, service: { connect: { id } } } }) await prisma.serviceSetting.create({ data: { name, value, variableName, service: { connect: { id } } } })
} }
@@ -724,7 +757,7 @@ export async function activatePlausibleUsers(request: FastifyRequest<OnlyId>, re
if (destinationDockerId) { if (destinationDockerId) {
const databaseUrl = serviceSecret.find((secret) => secret.name === 'DATABASE_URL'); const databaseUrl = serviceSecret.find((secret) => secret.name === 'DATABASE_URL');
if (databaseUrl) { if (databaseUrl) {
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker exec ${id}-postgresql psql -H ${databaseUrl.value} -c "UPDATE users SET email_verified = true;"` command: `docker exec ${id}-postgresql psql -H ${databaseUrl.value} -c "UPDATE users SET email_verified = true;"`
}) })
@@ -745,9 +778,10 @@ export async function cleanupPlausibleLogs(request: FastifyRequest<OnlyId>, repl
destinationDocker, destinationDocker,
} = await getServiceFromDB({ id, teamId }); } = await getServiceFromDB({ id, teamId });
if (destinationDockerId) { if (destinationDockerId) {
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"` command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`,
shell: true
}) })
return await reply.code(201).send() return await reply.code(201).send()
} }
@@ -787,36 +821,42 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
if (user) ftpUser = user; if (user) ftpUser = user;
if (savedPassword) ftpPassword = decrypt(savedPassword); if (savedPassword) ftpPassword = decrypt(savedPassword);
const { stdout: password } = await asyncExecShell( // TODO: rewrite these to usable without shell
`echo ${ftpPassword} | openssl passwd -1 -stdin` const { stdout: password } = await executeCommand({
command:
`echo ${ftpPassword} | openssl passwd -1 -stdin`,
shell: true
}
); );
if (destinationDockerId) { if (destinationDockerId) {
try { try {
await fs.stat(hostkeyDir); await fs.stat(hostkeyDir);
} catch (error) { } catch (error) {
await asyncExecShell(`mkdir -p ${hostkeyDir}`); await executeCommand({ command: `mkdir -p ${hostkeyDir}` });
} }
if (!ftpHostKey) { if (!ftpHostKey) {
await asyncExecShell( await executeCommand({
`ssh-keygen -t ed25519 -f ssh_host_ed25519_key -N "" -q -f ${hostkeyDir}/${id}.ed25519` command:
`ssh-keygen -t ed25519 -f ssh_host_ed25519_key -N "" -q -f ${hostkeyDir}/${id}.ed25519`
}
); );
const { stdout: ftpHostKey } = await asyncExecShell(`cat ${hostkeyDir}/${id}.ed25519`); const { stdout: ftpHostKey } = await executeCommand({ command: `cat ${hostkeyDir}/${id}.ed25519` });
await prisma.wordpress.update({ await prisma.wordpress.update({
where: { serviceId: id }, where: { serviceId: id },
data: { ftpHostKey: encrypt(ftpHostKey) } data: { ftpHostKey: encrypt(ftpHostKey) }
}); });
} else { } else {
await asyncExecShell(`echo "${decrypt(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`); await executeCommand({ command: `echo "${decrypt(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`, shell: true });
} }
if (!ftpHostKeyPrivate) { if (!ftpHostKeyPrivate) {
await asyncExecShell(`ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa`); await executeCommand({ command: `ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa` });
const { stdout: ftpHostKeyPrivate } = await asyncExecShell(`cat ${hostkeyDir}/${id}.rsa`); const { stdout: ftpHostKeyPrivate } = await executeCommand({ command: `cat ${hostkeyDir}/${id}.rsa` });
await prisma.wordpress.update({ await prisma.wordpress.update({
where: { serviceId: id }, where: { serviceId: id },
data: { ftpHostKeyPrivate: encrypt(ftpHostKeyPrivate) } data: { ftpHostKeyPrivate: encrypt(ftpHostKeyPrivate) }
}); });
} else { } else {
await asyncExecShell(`echo "${decrypt(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`); await executeCommand({ command: `echo "${decrypt(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`, shell: true });
} }
await prisma.wordpress.update({ await prisma.wordpress.update({
@@ -831,9 +871,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
try { try {
const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` }); const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
if (isRunning) { if (isRunning) {
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp` command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
shell: true
}) })
} }
} catch (error) { } } catch (error) { }
@@ -877,9 +918,9 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
`${hostkeyDir}/${id}.sh`, `${hostkeyDir}/${id}.sh`,
`#!/bin/bash\nchmod 600 /etc/ssh/ssh_host_ed25519_key /etc/ssh/ssh_host_rsa_key\nuserdel -f xfs\nchown -R 33:33 /home/${ftpUser}/wordpress/` `#!/bin/bash\nchmod 600 /etc/ssh/ssh_host_ed25519_key /etc/ssh/ssh_host_rsa_key\nuserdel -f xfs\nchown -R 33:33 /home/${ftpUser}/wordpress/`
); );
await asyncExecShell(`chmod +x ${hostkeyDir}/${id}.sh`); await executeCommand({ command: `chmod +x ${hostkeyDir}/${id}.sh` });
await fs.writeFile(`${hostkeyDir}/${id}-docker-compose.yml`, yaml.dump(compose)); await fs.writeFile(`${hostkeyDir}/${id}-docker-compose.yml`, yaml.dump(compose));
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker compose -f ${hostkeyDir}/${id}-docker-compose.yml up -d` command: `docker compose -f ${hostkeyDir}/${id}-docker-compose.yml up -d`
}) })
@@ -896,9 +937,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
data: { ftpPublicPort: null } data: { ftpPublicPort: null }
}); });
try { try {
await executeDockerCmd({ await executeCommand({
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp` command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
shell: true
}) })
} catch (error) { } catch (error) {
@@ -912,8 +954,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
return errorHandler({ status, message }) return errorHandler({ status, message })
} finally { } finally {
try { try {
await asyncExecShell( await executeCommand({
`rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh` command:
`rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
}
); );
} catch (error) { } } catch (error) { }

View File

@@ -1,9 +1,9 @@
import { promises as dns } from 'dns'; import { promises as dns } from 'dns';
import { X509Certificate } from 'node:crypto'; import { X509Certificate } from 'node:crypto';
import * as Sentry from '@sentry/node';
import type { FastifyReply, FastifyRequest } from 'fastify'; import type { FastifyReply, FastifyRequest } from 'fastify';
import { asyncExecShell, checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, isDev, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common'; import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, executeCommand, getDomain, isDev, isDNSValid, isDomainConfigured, listSettings, prisma, sentryDSN, version } from '../../../../lib/common';
import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types'; import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types';
export async function listAllSettings(request: FastifyRequest) { export async function listAllSettings(request: FastifyRequest) {
@@ -11,6 +11,13 @@ export async function listAllSettings(request: FastifyRequest) {
const teamId = request.user.teamId; const teamId = request.user.teamId;
const settings = await listSettings(); const settings = await listSettings();
const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } }) const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } })
let registries = await prisma.dockerRegistry.findMany({ where: { team: { id: teamId } } })
registries = registries.map((registry) => {
if (registry.password) {
registry.password = decrypt(registry.password)
}
return registry
})
const unencryptedKeys = [] const unencryptedKeys = []
if (sshKeys.length > 0) { if (sshKeys.length > 0) {
for (const key of sshKeys) { for (const key of sshKeys) {
@@ -27,7 +34,8 @@ export async function listAllSettings(request: FastifyRequest) {
return { return {
settings, settings,
certificates: cns, certificates: cns,
sshKeys: unencryptedKeys sshKeys: unencryptedKeys,
registries
} }
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
@@ -35,7 +43,10 @@ export async function listAllSettings(request: FastifyRequest) {
} }
export async function saveSettings(request: FastifyRequest<SaveSettings>, reply: FastifyReply) { export async function saveSettings(request: FastifyRequest<SaveSettings>, reply: FastifyReply) {
try { try {
const { let {
previewSeparator,
numberOfDockerImagesKeptLocally,
doNotTrack,
fqdn, fqdn,
isAPIDebuggingEnabled, isAPIDebuggingEnabled,
isRegistrationEnabled, isRegistrationEnabled,
@@ -47,10 +58,29 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
DNSServers, DNSServers,
proxyDefaultRedirect proxyDefaultRedirect
} = request.body } = request.body
const { id } = await listSettings(); const { id, previewSeparator: SetPreviewSeparator } = await listSettings();
if (numberOfDockerImagesKeptLocally) {
numberOfDockerImagesKeptLocally = Number(numberOfDockerImagesKeptLocally)
}
if (previewSeparator == '') {
previewSeparator = '.'
}
if (SetPreviewSeparator != previewSeparator) {
const applications = await prisma.application.findMany({ where: { previewApplication: { some: { id: { not: undefined } } } }, include: { previewApplication: true } })
for (const application of applications) {
for (const preview of application.previewApplication) {
const { protocol } = new URL(preview.customDomain)
const { pullmergeRequestId } = preview
const { fqdn } = application
const newPreviewDomain = `${protocol}//${pullmergeRequestId}${previewSeparator}${getDomain(fqdn)}`
await prisma.previewApplication.update({ where: { id: preview.id }, data: { customDomain: newPreviewDomain } })
}
}
}
await prisma.setting.update({ await prisma.setting.update({
where: { id }, where: { id },
data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled, } data: { previewSeparator, numberOfDockerImagesKeptLocally, doNotTrack, isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled }
}); });
if (fqdn) { if (fqdn) {
await prisma.setting.update({ where: { id }, data: { fqdn } }); await prisma.setting.update({ where: { id }, data: { fqdn } });
@@ -59,6 +89,14 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
if (minPort && maxPort) { if (minPort && maxPort) {
await prisma.setting.update({ where: { id }, data: { minPort, maxPort } }); await prisma.setting.update({ where: { id }, data: { minPort, maxPort } });
} }
if (doNotTrack === false) {
// Sentry.init({
// dsn: sentryDSN,
// environment: isDev ? 'development' : 'production',
// release: version
// });
// console.log('Sentry initialized')
}
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
@@ -91,7 +129,7 @@ export async function checkDomain(request: FastifyRequest<CheckDomain>) {
if (fqdn) fqdn = fqdn.toLowerCase(); if (fqdn) fqdn = fqdn.toLowerCase();
const found = await isDomainConfigured({ id, fqdn }); const found = await isDomainConfigured({ id, fqdn });
if (found) { if (found) {
throw "Domain already configured"; throw { message: "Domain already configured" };
} }
if (isDNSCheckEnabled && !forceSave && !isDev) { if (isDNSCheckEnabled && !forceSave && !isDev) {
const hostname = request.hostname.split(':')[0] const hostname = request.hostname.split(':')[0]
@@ -131,8 +169,9 @@ export async function saveSSHKey(request: FastifyRequest<SaveSSHKey>, reply: Fas
} }
export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) { export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
try { try {
const teamId = request.user.teamId;
const { id } = request.body; const { id } = request.body;
await prisma.sshKey.delete({ where: { id } }) await prisma.sshKey.deleteMany({ where: { id, teamId } })
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
@@ -141,9 +180,54 @@ export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply:
export async function deleteCertificates(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) { export async function deleteCertificates(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
try { try {
const teamId = request.user.teamId;
const { id } = request.body; const { id } = request.body;
await asyncExecShell(`docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`) await executeCommand({ command: `docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`, shell: true })
await prisma.certificate.delete({ where: { id } }) await prisma.certificate.deleteMany({ where: { id, teamId } })
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function setDockerRegistry(request: FastifyRequest<SetDefaultRegistry>, reply: FastifyReply) {
    // Update the stored credentials of an existing Docker registry.
    // Root team ('0') may edit any registry; other teams only their own rows.
    try {
        const { id, username, password } = request.body;
        const { teamId } = request.user;
        // Persist the password encrypted at rest; an empty password is stored as ''.
        // NOTE(review): submitting without a password overwrites any stored one — confirm intended.
        const encryptedPassword = password ? encrypt(password) : ''
        const data = { username, password: encryptedPassword }
        if (teamId === '0') {
            await prisma.dockerRegistry.update({ where: { id }, data })
        } else {
            // updateMany scopes the write to the caller's team, acting as an ownership check.
            await prisma.dockerRegistry.updateMany({ where: { id, teamId }, data })
        }
        return reply.code(201).send()
    } catch ({ status, message }) {
        return errorHandler({ status, message })
    }
}
export async function addDockerRegistry(request: FastifyRequest<AddDefaultRegistry>, reply: FastifyReply) {
    // Create a new Docker registry entry owned by the caller's team.
    try {
        const { name, url, username, password } = request.body;
        const { teamId } = request.user;
        // Encrypt before storing; missing password is kept as an empty string.
        const encryptedPassword = password ? encrypt(password) : ''
        await prisma.dockerRegistry.create({
            data: { name, url, username, password: encryptedPassword, team: { connect: { id: teamId } } }
        })
        return reply.code(201).send()
    } catch ({ status, message }) {
        return errorHandler({ status, message })
    }
}
export async function deleteDockerRegistry(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
try {
const teamId = request.user.teamId;
const { id } = request.body;
await prisma.application.updateMany({ where: { dockerRegistryId: id }, data: { dockerRegistryId: null } })
await prisma.dockerRegistry.deleteMany({ where: { id, teamId } })
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })

View File

@@ -2,8 +2,8 @@ import { FastifyPluginAsync } from 'fastify';
import { X509Certificate } from 'node:crypto'; import { X509Certificate } from 'node:crypto';
import { encrypt, errorHandler, prisma } from '../../../../lib/common'; import { encrypt, errorHandler, prisma } from '../../../../lib/common';
import { checkDNS, checkDomain, deleteCertificates, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers'; import { addDockerRegistry, checkDNS, checkDomain, deleteCertificates, deleteDockerRegistry, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey, setDockerRegistry } from './handlers';
import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types'; import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => { const root: FastifyPluginAsync = async (fastify): Promise<void> => {
@@ -20,6 +20,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply)); fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply));
fastify.delete<OnlyIdInBody>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply)); fastify.delete<OnlyIdInBody>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply));
fastify.post<SetDefaultRegistry>('/registry', async (request, reply) => await setDockerRegistry(request, reply));
fastify.post<AddDefaultRegistry>('/registry/new', async (request, reply) => await addDockerRegistry(request, reply));
fastify.delete<OnlyIdInBody>('/registry', async (request, reply) => await deleteDockerRegistry(request, reply));
fastify.post('/upload', async (request) => { fastify.post('/upload', async (request) => {
try { try {
const teamId = request.user.teamId; const teamId = request.user.teamId;
@@ -53,7 +57,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
}); });
fastify.delete<OnlyIdInBody>('/certificate', async (request, reply) => await deleteCertificates(request, reply)) fastify.delete<OnlyIdInBody>('/certificate', async (request, reply) => await deleteCertificates(request, reply))
// fastify.get('/certificates', async (request) => await getCertificates(request))
}; };
export default root; export default root;

View File

@@ -2,6 +2,9 @@ import { OnlyId } from "../../../../types"
export interface SaveSettings { export interface SaveSettings {
Body: { Body: {
previewSeparator: string,
numberOfDockerImagesKeptLocally: number,
doNotTrack: boolean,
fqdn: string, fqdn: string,
isAPIDebuggingEnabled: boolean, isAPIDebuggingEnabled: boolean,
isRegistrationEnabled: boolean, isRegistrationEnabled: boolean,
@@ -21,30 +24,46 @@ export interface DeleteDomain {
} }
export interface CheckDomain extends OnlyId { export interface CheckDomain extends OnlyId {
Body: { Body: {
fqdn: string, fqdn: string,
forceSave: boolean, forceSave: boolean,
dualCerts: boolean, dualCerts: boolean,
isDNSCheckEnabled: boolean, isDNSCheckEnabled: boolean,
} }
} }
export interface CheckDNS { export interface CheckDNS {
Params: { Params: {
domain: string, domain: string,
} }
} }
export interface SaveSSHKey { export interface SaveSSHKey {
Body: { Body: {
privateKey: string, privateKey: string,
name: string name: string
} }
} }
export interface DeleteSSHKey { export interface DeleteSSHKey {
Body: { Body: {
id: string id: string
} }
} }
export interface OnlyIdInBody { export interface OnlyIdInBody {
Body: { Body: {
id: string id: string
} }
}
// Request shape for POST /settings/registry — update credentials
// on an existing Docker registry (see setDockerRegistry handler).
export interface SetDefaultRegistry {
Body: {
id: string
username: string
password: string
}
}
// Request shape for POST /settings/registry/new — create a new
// Docker registry for the caller's team (see addDockerRegistry handler).
export interface AddDefaultRegistry {
Body: {
url: string
name: string
username: string
password: string
}
}
} }

View File

@@ -37,9 +37,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
try { try {
const { id } = request.params const { id } = request.params
const { teamId } = request.user const { teamId } = request.user
const settings = await prisma.setting.findFirst({}); const settings = await prisma.setting.findFirst({});
if (settings.proxyPassword) settings.proxyPassword = decrypt(settings.proxyPassword);
if (id === 'new') { if (id === 'new') {
return { return {

View File

@@ -71,7 +71,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
const githubEvent = request.headers['x-github-event']?.toString().toLowerCase(); const githubEvent = request.headers['x-github-event']?.toString().toLowerCase();
const githubSignature = request.headers['x-hub-signature-256']?.toString().toLowerCase(); const githubSignature = request.headers['x-hub-signature-256']?.toString().toLowerCase();
if (!allowedGithubEvents.includes(githubEvent)) { if (!allowedGithubEvents.includes(githubEvent)) {
throw { status: 500, message: 'Event not allowed.' } throw { status: 500, message: 'Event not allowed.', type: 'webhook' }
} }
if (githubEvent === 'ping') { if (githubEvent === 'ping') {
return { pong: 'cool' } return { pong: 'cool' }
@@ -89,9 +89,10 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
branch = body.pull_request.base.ref branch = body.pull_request.base.ref
} }
if (!projectId || !branch) { if (!projectId || !branch) {
throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!' } throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!', type: 'webhook' }
} }
const applicationsFound = await getApplicationFromDBWebhook(projectId, branch); const applicationsFound = await getApplicationFromDBWebhook(projectId, branch);
const settings = await prisma.setting.findUnique({ where: { id: '0' } });
if (applicationsFound && applicationsFound.length > 0) { if (applicationsFound && applicationsFound.length > 0) {
for (const application of applicationsFound) { for (const application of applicationsFound) {
const buildId = cuid(); const buildId = cuid();
@@ -106,7 +107,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
const checksum = Buffer.from(githubSignature, 'utf8'); const checksum = Buffer.from(githubSignature, 'utf8');
//@ts-ignore //@ts-ignore
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) { if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?' } throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?', type: 'webhook' }
}; };
} }
@@ -156,7 +157,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
const sourceBranch = body.pull_request.head.ref const sourceBranch = body.pull_request.head.ref
const sourceRepository = body.pull_request.head.repo.full_name const sourceRepository = body.pull_request.head.repo.full_name
if (!allowedActions.includes(pullmergeRequestAction)) { if (!allowedActions.includes(pullmergeRequestAction)) {
throw { status: 500, message: 'Action not allowed.' } throw { status: 500, message: 'Action not allowed.', type: 'webhook' }
} }
if (application.settings.previews) { if (application.settings.previews) {
@@ -168,7 +169,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
} }
); );
if (!isRunning) { if (!isRunning) {
throw { status: 500, message: 'Application not running.' } throw { status: 500, message: 'Application not running.', type: 'webhook' }
} }
} }
if ( if (
@@ -192,7 +193,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
data: { data: {
pullmergeRequestId, pullmergeRequestId,
sourceBranch, sourceBranch,
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`, customDomain: `${protocol}${pullmergeRequestId}${settings.previewSeparator}${getDomain(application.fqdn)}`,
application: { connect: { id: application.id } } application: { connect: { id: application.id } }
} }
}) })
@@ -257,8 +258,8 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
} }
} }
} }
} catch ({ status, message }) { } catch ({ status, message, type }) {
return errorHandler({ status, message }) return errorHandler({ status, message, type })
} }
} }

View File

@@ -44,8 +44,9 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
const allowedActions = ['opened', 'reopen', 'close', 'open', 'update']; const allowedActions = ['opened', 'reopen', 'close', 'open', 'update'];
const webhookToken = request.headers['x-gitlab-token']; const webhookToken = request.headers['x-gitlab-token'];
if (!webhookToken && !isDev) { if (!webhookToken && !isDev) {
throw { status: 500, message: 'Invalid webhookToken.' } throw { status: 500, message: 'Invalid webhookToken.', type: 'webhook' }
} }
const settings = await prisma.setting.findUnique({ where: { id: '0' } });
if (objectKind === 'push') { if (objectKind === 'push') {
const projectId = Number(project_id); const projectId = Number(project_id);
const branch = ref.split('/')[2]; const branch = ref.split('/')[2];
@@ -95,10 +96,10 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
const pullmergeRequestId = request.body.object_attributes.iid.toString(); const pullmergeRequestId = request.body.object_attributes.iid.toString();
const projectId = Number(id); const projectId = Number(id);
if (!allowedActions.includes(action)) { if (!allowedActions.includes(action)) {
throw { status: 500, message: 'Action not allowed.' } throw { status: 500, message: 'Action not allowed.', type: 'webhook' }
} }
if (isDraft) { if (isDraft) {
throw { status: 500, message: 'Draft MR, do nothing.' } throw { status: 500, message: 'Draft MR, do nothing.', type: 'webhook' }
} }
const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch); const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch);
if (applicationsFound && applicationsFound.length > 0) { if (applicationsFound && applicationsFound.length > 0) {
@@ -113,11 +114,11 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
} }
); );
if (!isRunning) { if (!isRunning) {
throw { status: 500, message: 'Application not running.' } throw { status: 500, message: 'Application not running.', type: 'webhook' }
} }
} }
if (!isDev && application.gitSource.gitlabApp.webhookToken !== webhookToken) { if (!isDev && application.gitSource.gitlabApp.webhookToken !== webhookToken) {
throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!' } throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!', type: 'webhook' }
} }
if ( if (
action === 'opened' || action === 'opened' ||
@@ -140,7 +141,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
data: { data: {
pullmergeRequestId, pullmergeRequestId,
sourceBranch, sourceBranch,
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`, customDomain: `${protocol}${pullmergeRequestId}${settings.previewSeparator}${getDomain(application.fqdn)}`,
application: { connect: { id: application.id } } application: { connect: { id: application.id } }
} }
}) })
@@ -188,7 +189,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
} }
} }
} }
} catch ({ status, message }) { } catch ({ status, message, type }) {
return errorHandler({ status, message }) return errorHandler({ status, message, type })
} }
} }

View File

@@ -1,5 +1,5 @@
import { FastifyRequest } from "fastify"; import { FastifyRequest } from "fastify";
import { errorHandler, getDomain, isDev, prisma, executeDockerCmd, fixType } from "../../../lib/common"; import { errorHandler, getDomain, isDev, prisma, executeCommand } from "../../../lib/common";
import { getTemplates } from "../../../lib/services"; import { getTemplates } from "../../../lib/services";
import { OnlyId } from "../../../types"; import { OnlyId } from "../../../types";
@@ -171,8 +171,8 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
}; };
try { try {
const { id = null } = request.params; const { id = null } = request.params;
const settings = await prisma.setting.findFirst(); const coolifySettings = await prisma.setting.findFirst();
if (settings.isTraefikUsed && settings.proxyDefaultRedirect) { if (coolifySettings.isTraefikUsed && coolifySettings.proxyDefaultRedirect) {
traefik.http.routers['catchall-http'] = { traefik.http.routers['catchall-http'] = {
entrypoints: ["web"], entrypoints: ["web"],
rule: "HostRegexp(`{catchall:.*}`)", rule: "HostRegexp(`{catchall:.*}`)",
@@ -190,7 +190,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
traefik.http.middlewares['redirect-regexp'] = { traefik.http.middlewares['redirect-regexp'] = {
redirectregex: { redirectregex: {
regex: '(.*)', regex: '(.*)',
replacement: settings.proxyDefaultRedirect, replacement: coolifySettings.proxyDefaultRedirect,
permanent: false permanent: false
} }
} }
@@ -263,10 +263,12 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
const runningContainers = {} const runningContainers = {}
applications.forEach((app) => dockerIds.add(app.destinationDocker.id)); applications.forEach((app) => dockerIds.add(app.destinationDocker.id));
for (const dockerId of dockerIds) { for (const dockerId of dockerIds) {
const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` }) const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
const containersArray = container.trim().split('\n'); if (container) {
if (containersArray.length > 0) { const containersArray = container.trim().split('\n');
runningContainers[dockerId] = containersArray if (containersArray.length > 0) {
runningContainers[dockerId] = containersArray
}
} }
} }
for (const application of applications) { for (const application of applications) {
@@ -287,11 +289,10 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
if ( if (
!runningContainers[destinationDockerId] || !runningContainers[destinationDockerId] ||
runningContainers[destinationDockerId].length === 0 || runningContainers[destinationDockerId].length === 0 ||
!runningContainers[destinationDockerId].includes(id) runningContainers[destinationDockerId].filter((container) => container.startsWith(id)).length === 0
) { ) {
continue continue
} }
if (buildPack === 'compose') { if (buildPack === 'compose') {
const services = Object.entries(JSON.parse(dockerComposeConfiguration)) const services = Object.entries(JSON.parse(dockerComposeConfiguration))
if (services.length > 0) { if (services.length > 0) {
@@ -333,20 +334,22 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) } traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) } traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
if (previews) { if (previews) {
const { stdout } = await executeDockerCmd({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` }) const { stdout } = await executeCommand({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` })
const containers = stdout if (stdout) {
.trim() const containers = stdout
.split('\n') .trim()
.filter((a) => a) .split('\n')
.map((c) => c.replace(/"/g, '')); .filter((a) => a)
if (containers.length > 0) { .map((c) => c.replace(/"/g, ''));
for (const container of containers) { if (containers.length > 0) {
const previewDomain = `${container.split('-')[1]}.${domain}`; for (const container of containers) {
const nakedDomain = previewDomain.replace(/^www\./, ''); const previewDomain = `${container.split('-')[1]}${coolifySettings.previewSeparator}${domain}`;
const pathPrefix = '/' const nakedDomain = previewDomain.replace(/^www\./, '');
const serviceId = `${container}-${port || 'default'}` const pathPrefix = '/'
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, previewDomain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) } const serviceId = `${container}-${port || 'default'}`
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, container, port) } traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, previewDomain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, container, port) }
}
} }
} }
} }
@@ -360,10 +363,12 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
const runningContainers = {} const runningContainers = {}
services.forEach((app) => dockerIds.add(app.destinationDocker.id)); services.forEach((app) => dockerIds.add(app.destinationDocker.id));
for (const dockerId of dockerIds) { for (const dockerId of dockerIds) {
const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` }) const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
const containersArray = container.trim().split('\n'); if (container) {
if (containersArray.length > 0) { const containersArray = container.trim().split('\n');
runningContainers[dockerId] = containersArray if (containersArray.length > 0) {
runningContainers[dockerId] = containersArray
}
} }
} }
for (const service of services) { for (const service of services) {
@@ -396,8 +401,8 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
} }
found = JSON.parse(JSON.stringify(found).replaceAll('$$id', id)); found = JSON.parse(JSON.stringify(found).replaceAll('$$id', id));
for (const oneService of Object.keys(found.services)) { for (const oneService of Object.keys(found.services)) {
const isProxyConfiguration = found.services[oneService].proxy; const isDomainConfiguration = found?.services[oneService]?.proxy?.filter(p => p.domain) ?? [];
if (isProxyConfiguration) { if (isDomainConfiguration.length > 0) {
const { proxy } = found.services[oneService]; const { proxy } = found.services[oneService];
for (let configuration of proxy) { for (let configuration of proxy) {
if (configuration.domain) { if (configuration.domain) {
@@ -432,20 +437,24 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
} }
} else { } else {
if (found.services[oneService].ports && found.services[oneService].ports.length > 0) { if (found.services[oneService].ports && found.services[oneService].ports.length > 0) {
let port = found.services[oneService].ports[0] for (let [index, port] of found.services[oneService].ports.entries()) {
const foundPortVariable = serviceSetting.find((a) => a.name.toLowerCase() === 'port') if (port == 22) continue;
if (foundPortVariable) { if (index === 0) {
port = foundPortVariable.value const foundPortVariable = serviceSetting.find((a) => a.name.toLowerCase() === 'port')
if (foundPortVariable) {
port = foundPortVariable.value
}
}
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const pathPrefix = '/'
const isCustomSSL = false
const serviceId = `${oneService}-${port || 'default'}`
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
} }
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const pathPrefix = '/'
const isCustomSSL = false
const serviceId = `${oneService}-${port || 'default'}`
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
} }
} }
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,2 @@
node_modules
backup/*

27
apps/backup/Dockerfile Normal file
View File

@@ -0,0 +1,27 @@
# Global build argument: the pnpm version to pin in both stages.
ARG PNPM_VERSION=7.17.1

# --- Build stage: install production node dependencies with pnpm ---
FROM node:18-slim as build
# Bug fix: an ARG declared before FROM is out of scope inside a stage unless
# re-declared, so ${PNPM_VERSION} previously expanded empty here.
ARG PNPM_VERSION
WORKDIR /app
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
# Copying multiple files via a glob must target a directory (trailing slash).
COPY ./package*.json ./
# -p / --prod: skip devDependencies.
RUN pnpm install -p
COPY . .

# Production build
# --- Runtime stage: node + docker CLI + minio client ---
FROM node:18-slim
# Re-declare the global ARG so it is visible in this stage too.
ARG PNPM_VERSION
ARG DOCKER_VERSION=20.10.18
ARG TARGETPLATFORM
ENV NODE_ENV production
WORKDIR /app
RUN apt update && apt -y install curl
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
# Static docker CLI binary for the target platform — the backup script shells
# out to `docker exec`/`docker inspect`.
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN chmod +x /usr/bin/docker
# `mc` (MinIO client) is used to push backups to S3-compatible storage.
COPY --from=minio/mc:latest /usr/bin/mc /usr/bin/mc
COPY --from=build /app/ .
# Presumably disables dependency telemetry/update checks — TODO confirm which tool reads this.
ENV CHECKPOINT_DISABLE=1
CMD node /app/src/index.mjs

View File

24
apps/backup/package.json Normal file
View File

@@ -0,0 +1,24 @@
{
"name": "backup",
"version": "0.0.1",
"description": "",
"author": "Andras Bacsai",
"license": "Apache-2.0",
"main": "index.mjs",
"type": "module",
"scripts": {
"start": "NODE_ENV=production node src/index.mjs",
"dev": "pnpm cleanup && NODE_ENV=development node src/index.mjs",
"build": "docker build -t backup .",
"test": "pnpm build && docker run -ti --rm -v /var/run/docker.sock:/var/run/docker.sock -v /root/devel/coolify/apps/backup/backups:/app/backups -e CONTAINERS_TO_BACKUP='clatmhc6000008lvb5a5tnvsk:database:mysql:local' backup",
"cleanup": "rm -rf backups/*"
},
"keywords": [],
"dependencies": {
"@aws-sdk/client-s3": "^3.222.0",
"@aws-sdk/lib-storage": "^3.222.0",
"cuid": "2.1.8",
"dotenv": "16.0.3",
"zx": "7.1.1"
}
}

126
apps/backup/src/index.mjs Normal file
View File

@@ -0,0 +1,126 @@
import * as dotenv from 'dotenv';
dotenv.config()
import 'zx/globals';
import cuid from 'cuid';
import { S3, PutObjectCommand } from "@aws-sdk/client-s3";
import fs from 'fs';
const isDev = process.env.NODE_ENV === 'development'
// Echo executed shell commands only during development.
$.verbose = !!isDev
// In production the container list must be supplied via the environment.
if (!process.env.CONTAINERS_TO_BACKUP && !isDev) {
console.log(chalk.red(`No containers to backup!`))
process.exit(1)
}
// Sample backup specs used for local development. Format:
// "<container>:<backupType>:<engine>:<raw|gzip>:<storage>"
const mysqlGzipLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local';
const mysqlRawLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:raw:local';
const postgresqlGzipLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:gzip:local';
const postgresqlRawLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:raw:local';
// Example remote-storage specs ("<provider>|<protocol>|<url>|<bucket>|<key>|<secret>");
// note these are NOT included in devContainers below.
const minio = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:minio|http|min.arm.coolify.io|backups|<access_key>|<secret_key>';
const digitalOcean = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:do|https|fra1.digitaloceanspaces.com|backups|<access_key>|<secret_key>';
const devContainers = [mysqlGzipLocal, mysqlRawLocal, postgresqlGzipLocal, postgresqlRawLocal]
// Production expects a comma-separated list of specs.
const containers = isDev
? devContainers
: process.env.CONTAINERS_TO_BACKUP.split(',')
// Back up one container described by a colon-separated spec:
//   "<containerName>:<backupType>:<engine>:<raw|gzip>:<storage>"
// where <storage> is "local" or "<provider>|<protocol>|<url>|<bucket>|<key>|<secret>".
// All failures are caught and logged; the function never throws to the caller.
const backup = async (container) => {
    const id = cuid()
    const [name, backupType, type, zipped, storage] = container.split(':')
    const directory = `backups`;
    const filename = zipped === 'raw'
        ? `${name}-${type}-${backupType}-${new Date().getTime()}.sql`
        : `${name}-${type}-${backupType}-${new Date().getTime()}.tgz`
    // Bug fix: the dump path previously did not include the generated unique
    // filename, so parallel backups would collide on the same file. Also renamed
    // from `backup`, which shadowed this function's own name.
    const backupFile = `${directory}/${filename}`;
    try {
        // Throws (and aborts this backup) when the container does not exist.
        await $`docker inspect ${name.split(' ')[0]}`.quiet()
        if (backupType === 'database') {
            if (type === 'mysql') {
                console.log(chalk.blue(`Backing up ${name}:${type}...`))
                const { stdout: rootPassword } = await $`docker exec ${name} printenv MYSQL_ROOT_PASSWORD`.quiet()
                if (zipped === 'raw') {
                    await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" > ${backupFile}`
                } else if (zipped === 'gzip') {
                    await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" | gzip > ${backupFile}`
                }
            }
            if (type === 'postgresql') {
                console.log(chalk.blue(`Backing up ${name}:${type}...`))
                // .quiet() so credentials are not echoed in verbose/dev mode (matches the mysql path).
                const { stdout: userPassword } = await $`docker exec ${name} printenv POSTGRES_PASSWORD`.quiet()
                const { stdout: user } = await $`docker exec ${name} printenv POSTGRES_USER`.quiet()
                // Bug fix: "-W<password>" was previously placed outside the `sh -c`
                // command string, where the shell consumed it as $0 and pg_dumpall
                // never saw it (-W only forces an interactive prompt anyway).
                // Supply the password non-interactively via PGPASSWORD instead.
                if (zipped === 'raw') {
                    await $`docker exec -e PGPASSWORD=${userPassword.trim()} ${name} sh -c "exec pg_dumpall -c -U${user.trim()}" > ${backupFile}`
                } else if (zipped === 'gzip') {
                    await $`docker exec -e PGPASSWORD=${userPassword.trim()} ${name} sh -c "exec pg_dumpall -c -U${user.trim()}" | gzip > ${backupFile}`
                }
            }
            const [storageType, ...storageArgs] = storage.split('|')
            if (storageType !== 'local') {
                // NOTE: `let a, b, c = null` only initializes the LAST name; all five
                // are assigned by the destructuring below when args are present.
                let s3Protocol, s3Url, s3Bucket, s3Key, s3Secret = null
                if (storageArgs.length > 0) {
                    [s3Protocol, s3Url, s3Bucket, s3Key, s3Secret] = storageArgs
                }
                if (storageType === 'minio') {
                    if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
                        console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
                        return
                    }
                    // Register a throw-away mc alias, upload, then clean up both the
                    // local dump file and the alias.
                    await $`mc alias set ${id} ${s3Protocol}://${s3Url} ${s3Key} ${s3Secret}`
                    await $`mc stat ${id}`
                    await $`mc cp ${backupFile} ${id}/${s3Bucket}`
                    await $`rm ${backupFile}`
                    await $`mc alias rm ${id}`
                } else if (storageType === 'do') {
                    if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
                        console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
                        return
                    }
                    // Security fix: previously the full credential set (access key and
                    // secret) was console.log'd here; never log secrets.
                    console.log(chalk.blue(`Uploading ${name}:${type} to DigitalOcean Spaces...`))
                    const readstream = fs.createReadStream(backupFile)
                    const bucketParams = {
                        Bucket: s3Bucket,
                        Key: filename,
                        Body: readstream
                    };
                    const s3Client = new S3({
                        forcePathStyle: false,
                        endpoint: `${s3Protocol}://${s3Url}`,
                        region: "us-east-1",
                        credentials: {
                            accessKeyId: s3Key,
                            secretAccessKey: s3Secret
                        },
                    });
                    try {
                        const data = await s3Client.send(new PutObjectCommand(bucketParams));
                        console.log(chalk.green("Successfully uploaded backup: " +
                            bucketParams.Bucket +
                            "/" +
                            bucketParams.Key
                        )
                        );
                        // NOTE(review): unlike the minio path, the local dump file is
                        // kept on disk here — confirm this is intended.
                        return data;
                    } catch (err) {
                        console.log("Error", err);
                    }
                }
            }
        }
        console.log(chalk.green(`Backup of ${name}:${type} complete!`))
    } catch (error) {
        console.log(chalk.red(`Backup of ${name}:${type} failed!`))
        console.log(chalk.red(error))
    }
}
// Kick off a backup for every container concurrently and wait for all of them
// to settle (each backup() catches its own errors internally).
const pendingBackups = containers.map((container) => backup(container))
await Promise.all(pendingBackups)

13
apps/client/.eslintignore Normal file
View File

@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

20
apps/client/.eslintrc.cjs Normal file
View File

@@ -0,0 +1,20 @@
// ESLint configuration for the SvelteKit client: TypeScript-aware parsing plus
// the svelte3 plugin so .svelte files are linted through the Svelte processor.
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
plugins: ['svelte3', '@typescript-eslint'],
// .cjs files (like this one) are CommonJS and would trip module-syntax rules.
ignorePatterns: ['*.cjs'],
overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
settings: {
// Lazily hand the project's TypeScript instance to eslint-plugin-svelte3.
'svelte3/typescript': () => require('typescript')
},
parserOptions: {
sourceType: 'module',
ecmaVersion: 2020
},
env: {
browser: true,
es2017: true,
node: true
}
};

10
apps/client/.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

1
apps/client/.npmrc Normal file
View File

@@ -0,0 +1 @@
engine-strict=true

View File

@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

9
apps/client/.prettierrc Normal file
View File

@@ -0,0 +1,9 @@
{
"useTabs": true,
"singleQuote": true,
"trailingComma": "none",
"printWidth": 100,
"plugins": ["prettier-plugin-svelte"],
"pluginSearchDirs": ["."],
"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
}

1
apps/client/README.md Normal file
View File

@@ -0,0 +1 @@
# SvelteKit Static site

54
apps/client/package.json Normal file
View File

@@ -0,0 +1,54 @@
{
"name": "client",
"description": "Coolify's SvelteKit UI",
"license": "Apache-2.0",
"private": true,
"scripts": {
"dev": "vite dev",
"build": "vite build && cp -Pr build/ ../../build/public",
"preview": "vite preview",
"test": "playwright test",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "prettier --plugin-search-dir . --check . && eslint .",
"format": "prettier --plugin-search-dir . --write ."
},
"devDependencies": {
"@playwright/test": "1.28.1",
"@sveltejs/adapter-static": "1.0.0-next.48",
"@sveltejs/kit": "1.0.0-next.572",
"@types/js-cookie": "3.0.2",
"@typescript-eslint/eslint-plugin": "5.44.0",
"@typescript-eslint/parser": "5.44.0",
"autoprefixer": "10.4.13",
"eslint": "8.28.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-svelte3": "4.0.0",
"postcss": "8.4.19",
"postcss-load-config": "4.0.1",
"prettier": "2.8.0",
"prettier-plugin-svelte": "2.8.1",
"svelte": "3.53.1",
"svelte-check": "2.9.2",
"svelte-preprocess": "^4.10.7",
"tailwindcss": "3.2.4",
"tslib": "2.4.1",
"typescript": "4.9.3",
"vite": "3.2.4"
},
"type": "module",
"dependencies": {
"@trpc/client": "10.1.0",
"@trpc/server": "10.1.0",
"cuid": "2.1.8",
"daisyui": "2.41.0",
"dayjs": "1.11.6",
"flowbite-svelte": "0.28.0",
"js-cookie": "3.0.1",
"js-yaml": "4.1.0",
"p-limit": "4.0.0",
"server": "workspace:*",
"superjson": "1.11.0",
"svelte-select": "4.4.7"
}
}

View File

@@ -0,0 +1,10 @@
import type { PlaywrightTestConfig } from '@playwright/test';
// Playwright first builds the static site and serves the production preview on
// port 4173 (Vite's default preview port), then runs the test suite against it.
const config: PlaywrightTestConfig = {
webServer: {
command: 'npm run build && npm run preview',
port: 4173
}
};
export default config;

1793
apps/client/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,13 @@
const tailwindcss = require('tailwindcss');
const autoprefixer = require('autoprefixer');
// PostCSS plugin order matters here.
const config = {
plugins: [
// Some plugins, like tailwindcss/nesting, need to run before Tailwind.
tailwindcss(),
// Others, like autoprefixer, need to run after it.
autoprefixer
]
};
module.exports = config;

12
apps/client/src/app.d.ts vendored Normal file
View File

@@ -0,0 +1,12 @@
// See https://kit.svelte.dev/docs/types#app
// for information about these interfaces
// and what to do when importing types
// SvelteKit's App namespace; the interfaces stay commented out until the app
// needs to type Locals/PageData/Error/Platform.
declare namespace App {
// interface Locals {}
// interface PageData {}
// interface Error {}
// interface Platform {}
}
// Globals for cloud-IDE environments — presumably injected at build time by a
// bundler `define`; TODO confirm against the Vite config.
declare const GITPOD_WORKSPACE_URL: string;
declare const CODESANDBOX_HOST: string;

12
apps/client/src/app.html Normal file
View File

@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
<meta name="viewport" content="width=device-width" />
%sveltekit.head%
</head>
<body>
<div class="h-screen">%sveltekit.body%</div>
</body>
</html>

284
apps/client/src/app.postcss Normal file
View File

@@ -0,0 +1,284 @@
/* Write your global styles here, in PostCSS syntax */
@tailwind base;
@tailwind components;
@tailwind utilities;
@font-face {
font-family: 'Poppins';
font-style: normal;
font-weight: 400;
src: local(''), url('/poppins-v19-latin-ext_latin_devanagari-regular.woff2') format('woff2'),
url('/poppins-v19-latin-ext_latin_devanagari-regular.woff') format('woff');
}
@font-face {
font-family: 'Poppins';
font-style: normal;
font-weight: 500;
src: local(''), url('/poppins-v19-latin-ext_latin_devanagari-500.woff2') format('woff2'),
url('/poppins-v19-latin-ext_latin_devanagari-500.woff') format('woff');
}
button {
@apply text-sm !important;
}
html {
@apply h-full min-h-full overflow-y-scroll;
}
body {
@apply min-h-screen overflow-x-hidden bg-coolblack text-sm text-white scrollbar-w-1 scrollbar-thumb-coollabs scrollbar-track-coolgray-200;
}
input,
.input {
@apply h-12 w-96 rounded border border-transparent bg-coolgray-200 p-2 text-xs tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:border disabled:border-dashed disabled:border-coolgray-200 disabled:bg-transparent disabled:bg-coolblack md:text-sm;
}
textarea {
@apply min-w-[14rem] rounded border border-transparent bg-coolgray-200 p-2 text-xs tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:border disabled:border-dashed disabled:border-coolgray-200 disabled:bg-transparent md:text-sm;
}
#svelte .custom-select-wrapper .selectContainer.disabled input {
@apply placeholder:text-stone-600;
}
#svelte .custom-select-wrapper .selectContainer input {
@apply text-white;
}
#svelte .custom-select-wrapper .selectContainer {
@apply h-12 rounded bg-coolgray-200 p-2 px-0 text-xs tracking-tight outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 md:text-sm;
}
#svelte .listContainer {
@apply bg-coolgray-400 text-white scrollbar-w-2 scrollbar-thumb-green-500 scrollbar-track-coolgray-200;
}
#svelte .selectedItem {
@apply pl-2;
}
#svelte .item.hover {
@apply bg-coollabs text-white !important;
}
#svelte .item.active {
@apply bg-coolgray-100 text-white;
}
select {
@apply h-12 w-96 rounded bg-coolgray-200 p-2 text-xs font-bold tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:text-stone-600 md:text-sm;
}
.custom-select-wrapper {
--background: rgb(32 32 32);
--inputColor: white;
--multiItemPadding: 0;
--multiSelectPadding: 0 0.5rem 0 0.5rem;
--border: none;
--placeholderColor: rgb(87 83 78);
--listBackground: rgb(32 32 32);
--itemColor: white;
--itemHoverBG: rgb(107 22 237);
--multiItemBG: rgb(32 32 32);
--multiClearHoverBG: transparent;
--multiClearHoverFill: rgb(239 68 68);
--multiItemActiveBG: transparent;
--multiClearBG: transparent;
--clearSelectFocusColor: white;
--clearSelectHoverColor: rgb(239 68 68);
--multiItemBorderRadius: 0.25rem;
--listShadow: none;
}
label {
@apply inline-block;
}
.btn {
@apply text-white text-base min-w-fit no-animation;
}
a {
@apply underline hover:text-white;
}
.content {
@apply p-2 px-4;
}
.title {
@apply text-lg lg:text-2xl font-bold;
}
.subtitle {
@apply text-lg lg:text-xl font-bold text-indigo-300;
}
.label {
@apply text-sm leading-6 font-semibold text-sky-500 dark:text-sky-400;
}
.card {
@apply border bg-coolgray-100 border-coolgray-200 rounded p-2 space-y-2 sticky top-4 mb-2 items-center;
}
.icon-holder {
overflow: hidden;
height: 30px;
border-radius: 5px;
margin-right: 8px;
background: linear-gradient(0deg, #999, #ddd);
}
.instance-status-running {
box-shadow: 1px 4px 5px #3df721;
}
.instance-status-stopped {
box-shadow: 1px 4px 5px rgb(110, 191, 225);
}
.instance-status-error {
box-shadow: 1px 4px 5px #fb00ff;
}
.instance-status-degraded {
box-shadow: 1px 4px 5px #f7b121;
}
.badge-status-healthy,
.badge-status-running {
@apply text-green-500;
}
.badge-status-degraded {
@apply text-green-500;
}
.badge-status-stopped {
@apply text-sky-500;
}
.delete-button {
@apply bg-red-600;
}
.delete-button:hover {
@apply bg-red-500;
}
/* Interchange menu position */
.menu-left {
display: flex;
flex-direction: row;
}
.menu-left .menu-bar {
display: flex;
flex-direction: column;
}
.menu-left .menu-bar > * {
display: flex;
flex-direction: column;
}
.menu-top {
display: flex;
flex-direction: column;
}
.menu-top .menu-bar {
display: flex;
flex-direction: row;
}
.menu-top .menu-bar > * {
display: flex;
flex-direction: row;
}
.nav-main {
@apply fixed top-0 left-0 min-h-screen w-16 min-w-[4rem] overflow-hidden border-r border-stone-800 bg-coolgray-200 scrollbar-w-1 scrollbar-thumb-coollabs scrollbar-track-coolgray-200 xl:overflow-visible;
}
.nav-side {
@apply absolute right-0 top-0 z-50 m-5 flex flex-wrap items-center justify-end space-x-2 bg-coolblack/40 text-white;
}
.add-icon {
@apply rounded p-1 transition duration-200;
}
.icons {
@apply rounded p-2 transition duration-200 hover:bg-coolgray-500 disabled:bg-coolblack disabled:text-coolgray-500 !important;
}
.arrow-right-applications {
@apply -ml-6 px-2 font-bold text-green-500;
}
.border-gradient {
border-bottom: 2px solid transparent;
-o-border-image: linear-gradient(
0.25turn,
rgba(255, 249, 34),
rgba(255, 0, 128),
rgba(56, 2, 155, 0)
);
border-image: linear-gradient(
0.25turn,
rgba(255, 249, 34),
rgba(255, 0, 128),
rgba(56, 2, 155, 0)
);
border-image-slice: 1;
}
.border-gradient-full {
border: 4px solid transparent;
-o-border-image: linear-gradient(
0.25turn,
rgba(255, 249, 34),
rgba(255, 0, 128),
rgba(56, 2, 155, 0)
);
border-image: linear-gradient(
0.25turn,
rgba(255, 249, 34),
rgba(255, 0, 128),
rgba(56, 2, 155, 0)
);
border-image-slice: 1;
}
.box-selection {
@apply min-w-[16rem] justify-center rounded border-transparent bg-coolgray-200 p-6 hover:border-transparent hover:bg-coolgray-400;
}
.lds-heart {
animation: lds-heart 1.2s infinite cubic-bezier(0.215, 0.61, 0.355, 1);
}
@keyframes lds-heart {
0% {
transform: scale(1);
}
5% {
transform: scale(1.2);
}
39% {
transform: scale(0.85);
}
45% {
transform: scale(1);
}
60% {
transform: scale(0.95);
}
100% {
transform: scale(0.9);
}
}
.sub-menu {
@apply w-48 text-base font-bold hover:bg-coolgray-500 rounded p-2 hover:text-white text-stone-200 cursor-pointer;
}
.sub-menu-active {
@apply bg-coolgray-500 text-white;
}
.table tbody td,
.table tbody th,
.table thead th {
background-color: transparent;
}
.table * {
border: none;
}
.header {
@apply flex flex-row z-10 w-full py-5 px-5;
}
.burger {
@apply block m-[2px] h-[3px] w-5 rounded;
}
.bg-coollabs-gradient {
@apply bg-gradient-to-r from-purple-500 via-pink-500 to-red-500;
}

View File

@@ -0,0 +1,201 @@
import { dev } from '$app/environment';
import { addToast } from './store';
import Cookies from 'js-cookie';
/** Awaitable pause: resolves after `delay` milliseconds (setTimeout wrapper). */
export const asyncSleep = (delay: number) =>
	new Promise<void>((resolve) => {
		setTimeout(resolve, delay);
	});
/**
 * Log an error to the console and surface it to the user as an error toast.
 * Error instances contribute their .message; anything else is passed through as-is.
 */
export function errorNotification(error: any | { message: string }): void {
	const message = error instanceof Error ? error.message : error;
	console.error(message);
	addToast({
		message,
		type: 'error'
	});
}
/** Uniform random integer in the inclusive range [min, max]. */
export function getRndInteger(min: number, max: number) {
	const span = max - min + 1;
	return min + Math.floor(Math.random() * span);
}
/** Strip a leading http(s):// scheme from a URL-ish string; null/undefined pass through. */
export function getDomain(domain: string) {
	const withoutHttps = domain?.replace('https://', '');
	return withoutHttps?.replace('http://', '');
}
// Build packs that do NOT run on a Node.js runtime.
export const notNodeDeployments = ['php', 'docker', 'rust', 'python', 'deno', 'laravel', 'heroku'];
// Build packs whose output is served as a static site.
export const staticDeployments = [
'react',
'vuejs',
'static',
'svelte',
'gatsby',
'php',
'astro',
'eleventy'
];
// Resolve the base URL of the API server for the current environment.
// GITPOD_WORKSPACE_URL / CODESANDBOX_HOST are ambient globals (see app.d.ts) —
// presumably replaced at build time by the bundler; a plain browser build
// would otherwise throw ReferenceError here. TODO confirm.
export function getAPIUrl() {
if (GITPOD_WORKSPACE_URL) {
const { href } = new URL(GITPOD_WORKSPACE_URL);
// Gitpod forwards ports by prefixing the workspace hostname, e.g. "3001-<host>".
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '');
return newURL;
}
if (CODESANDBOX_HOST) {
// CodeSandbox hosts contain a literal "$PORT" placeholder to substitute.
return `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
}
// NOTE(review): production resolves to localhost:3000 — looks intentional for a
// bundled same-host server, but verify against how the server is deployed.
return dev ? `http://${window.location.hostname}:3001` : 'http://localhost:3000';
}
/**
 * Compute the public webhook endpoint for a git provider ('github' | 'gitlab'),
 * honouring cloud-IDE port forwarding (Gitpod, CodeSandbox). Any other provider
 * — or no cloud IDE — falls back to a fixed webhook.site inbox.
 */
export function getWebhookUrl(type: string) {
	const eventsPath = (base: string) => {
		if (type === 'github') return `${base}/webhooks/github/events`;
		if (type === 'gitlab') return `${base}/webhooks/gitlab/events`;
		return null;
	};
	if (GITPOD_WORKSPACE_URL) {
		const { href } = new URL(GITPOD_WORKSPACE_URL);
		const base = href.replace('https://', 'https://3001-').replace(/\/$/, '');
		const url = eventsPath(base);
		if (url) return url;
	}
	if (CODESANDBOX_HOST) {
		const base = `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
		const url = eventsPath(base);
		if (url) return url;
	}
	return `https://webhook.site/0e5beb2c-4e9b-40e2-a89e-32295e570c21/events`;
}
/**
 * Core fetch wrapper used by get/post/put/del below.
 * - JSON-encodes plain-object bodies, converting '' values to null
 *   (NOTE: this mutates the caller's `data` object in place — parsedData aliases it).
 * - Attaches the bearer token from the 'token' cookie for same-origin requests.
 * - Rewrites relative paths to /api/v1 (and to the dev API host in dev mode).
 * - Aborts the request after `timeout` ms via AbortController.
 * - Throws the parsed response body on non-2xx; a 401 from our own API clears the token cookie.
 */
async function send({
method,
path,
data = null,
headers,
timeout = 120000
}: {
method: string;
path: string;
data?: any;
headers?: any;
timeout?: number;
}): Promise<Record<string, unknown>> {
const token = Cookies.get('token');
const controller = new AbortController();
// Abort the in-flight fetch when the timeout elapses.
const id = setTimeout(() => controller.abort(), timeout);
const opts: any = { method, headers: {}, body: null, signal: controller.signal };
if (data && Object.keys(data).length > 0) {
// parsedData aliases data — empty-string fields are nulled in place before encoding.
const parsedData = data;
for (const [key, value] of Object.entries(data)) {
if (value === '') {
parsedData[key] = null;
}
}
if (parsedData) {
opts.headers['Content-Type'] = 'application/json';
opts.body = JSON.stringify(parsedData);
}
}
if (headers) {
// Caller-supplied headers win over the defaults set above.
opts.headers = {
...opts.headers,
...headers
};
}
// Only attach our token to same-origin (relative) paths, never to absolute https URLs.
if (token && !path.startsWith('https://')) {
opts.headers = {
...opts.headers,
Authorization: `Bearer ${token}`
};
}
if (!path.startsWith('https://')) {
path = `/api/v1${path}`;
}
if (dev && !path.startsWith('https://')) {
path = `${getAPIUrl()}${path}`;
}
// A FormData body has no enumerable keys, so it skipped the JSON branch above;
// send it through untouched (the browser sets the multipart Content-Type itself).
if (method === 'POST' && data && !opts.body) {
opts.body = data;
}
const response = await fetch(`${path}`, opts);
clearTimeout(id);
const contentType = response.headers.get('content-type');
let responseData = {};
if (contentType) {
if (contentType?.indexOf('application/json') !== -1) {
responseData = await response.json();
} else if (contentType?.indexOf('text/plain') !== -1) {
responseData = await response.text();
} else {
// Unsupported content types are discarded.
return {};
}
} else {
return {};
}
if (!response.ok) {
// 401 from our own API (not GitHub / GitLab v4) means the session is stale.
if (
response.status === 401 &&
!path.startsWith('https://api.github') &&
!path.includes('/v4/')
) {
Cookies.remove('token');
}
// Surface the server's error payload to the caller.
throw responseData;
}
return responseData;
}
/** GET request via send(); see send() for path rewriting and auth behaviour. */
export function get(path: string, headers?: Record<string, unknown>): Promise<Record<string, any>> {
return send({ method: 'GET', path, headers });
}
/** DELETE request with a JSON body via send(). */
export function del(
path: string,
data: Record<string, unknown>,
headers?: Record<string, unknown>
): Promise<Record<string, any>> {
return send({ method: 'DELETE', path, data, headers });
}
/** POST request via send(); accepts a plain object (JSON-encoded) or FormData (sent raw). */
export function post(
path: string,
data: Record<string, unknown> | FormData,
headers?: Record<string, unknown>
): Promise<Record<string, any>> {
return send({ method: 'POST', path, data, headers });
}
/** PUT request with a JSON body via send(). */
export function put(
path: string,
data: Record<string, unknown>,
headers?: Record<string, unknown>
): Promise<Record<string, any>> {
return send({ method: 'PUT', path, data, headers });
}
/** Rewrite the `buildId` query parameter in the address bar without navigating. */
export function changeQueryParams(buildId: string) {
	const params = new URLSearchParams(window.location.search);
	params.set('buildId', buildId);
	// @ts-ignore
	return history.pushState(null, null, `?${params.toString()}`);
}
// Shared Intl.DateTimeFormat options: 24-hour timestamps like "Dec 27, 2022, 14:22:49".
export const dateOptions: any = {
year: 'numeric',
month: 'short',
day: '2-digit',
hour: 'numeric',
minute: 'numeric',
second: 'numeric',
hour12: false
};

View File

@@ -0,0 +1 @@
<span class="badge bg-coollabs-gradient rounded text-white font-normal"> BETA </span>

View File

@@ -0,0 +1,156 @@
<script lang="ts">
// Text/password/textarea input with an optional show-password toggle and a
// copy-to-clipboard button (clipboard access requires a secure context).
import { browser } from '$app/environment';
import { addToast } from '$lib/store';
// Whether a password field is currently revealed as plain text.
let showPassword = false;
export let value: string;
export let disabled = false;
// Renders as type="password" (with the eye toggle) when true.
export let isPasswordField = false;
export let readonly = false;
// Renders a <textarea> instead of an <input> when true.
export let textarea = false;
export let required = false;
export let pattern: string | null | undefined = null;
export let id: string;
export let name: string;
export let placeholder = '';
export let inputStyle = '';
let disabledClass = 'input input-primary bg-coolback disabled:bg-coolblack w-full';
// navigator.clipboard is only available on https pages (secure context).
let isHttps = browser && window.location.protocol === 'https:';
// Copy the current value and confirm with a success toast.
function copyToClipboard() {
if (isHttps && navigator.clipboard) {
navigator.clipboard.writeText(value);
addToast({
message: 'Copied to clipboard.',
type: 'success'
});
}
}
</script>
<div class="relative">
{#if !isPasswordField || showPassword}
{#if textarea}
<textarea
style={inputStyle}
rows="5"
class={disabledClass}
class:pr-10={true}
class:pr-20={value && isHttps}
class:border={required && !value}
class:border-red-500={required && !value}
{placeholder}
type="text"
{id}
{pattern}
{required}
{readonly}
{disabled}
{name}>{value}</textarea
>
{:else}
<input
style={inputStyle}
class={disabledClass}
type="text"
class:pr-10={true}
class:pr-20={value && isHttps}
class:border={required && !value}
class:border-red-500={required && !value}
{id}
{name}
{required}
{pattern}
{readonly}
bind:value
{disabled}
{placeholder}
/>
{/if}
{:else}
<input
style={inputStyle}
class={disabledClass}
class:pr-10={true}
class:pr-20={value && isHttps}
class:border={required && !value}
class:border-red-500={required && !value}
type="password"
{id}
{name}
{readonly}
{pattern}
{required}
bind:value
{disabled}
{placeholder}
/>
{/if}
<div class="absolute top-0 right-0 flex justify-center items-center h-full cursor-pointer text-stone-600 hover:text-white mr-3">
<div class="flex space-x-2">
{#if isPasswordField}
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div on:click={() => (showPassword = !showPassword)}>
{#if showPassword}
<svg
xmlns="http://www.w3.org/2000/svg"
class="h-6 w-6"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M13.875 18.825A10.05 10.05 0 0112 19c-4.478 0-8.268-2.943-9.543-7a9.97 9.97 0 011.563-3.029m5.858.908a3 3 0 114.243 4.243M9.878 9.878l4.242 4.242M9.88 9.88l-3.29-3.29m7.532 7.532l3.29 3.29M3 3l3.59 3.59m0 0A9.953 9.953 0 0112 5c4.478 0 8.268 2.943 9.543 7a10.025 10.025 0 01-4.132 5.411m0 0L21 21"
/>
</svg>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
class="h-6 w-6"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M15 12a3 3 0 11-6 0 3 3 0 016 0z"
/>
<path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M2.458 12C3.732 7.943 7.523 5 12 5c4.478 0 8.268 2.943 9.542 7-1.274 4.057-5.064 7-9.542 7-4.477 0-8.268-2.943-9.542-7z"
/>
</svg>
{/if}
</div>
{/if}
{#if value && isHttps}
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div on:click={copyToClipboard}>
<svg
xmlns="http://www.w3.org/2000/svg"
class="h-6 w-6"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
fill="none"
stroke-linecap="round"
stroke-linejoin="round"
>
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
<rect x="8" y="8" width="12" height="12" rx="2" />
<path d="M16 8v-2a2 2 0 0 0 -2 -2h-8a2 2 0 0 0 -2 2v8a2 2 0 0 0 2 2h2" />
</svg>
</div>
{/if}
</div>
</div>
</div>

View File

@@ -0,0 +1,38 @@
<script lang="ts">
// Small "i" info button that reveals an explanation card in a DaisyUI dropdown.
// import { onMount } from 'svelte';
// import Tooltip from './Tooltip.svelte';
// HTML string rendered inside the dropdown card (inserted with {@html} below —
// callers must only pass trusted markup).
export let explanation = '';
// DaisyUI dropdown placement class, e.g. 'dropdown-right'.
export let position = 'dropdown-right';
// let id: any;
// let self: any;
// onMount(() => {
// id = `info-${self.offsetLeft}-${self.offsetTop}`;
// });
</script>
<div class={`dropdown dropdown-end ${position}`}>
<!-- svelte-ignore a11y-label-has-associated-control -->
<!-- svelte-ignore a11y-no-noninteractive-tabindex -->
<label tabindex="0" class="btn btn-circle btn-ghost btn-xs text-sky-500">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
class="w-4 h-4 stroke-current"
><path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"
/></svg
>
</label>
<!-- svelte-ignore a11y-no-noninteractive-tabindex -->
<div tabindex="0" class="card compact dropdown-content shadow bg-coolgray-400 rounded w-64">
<div class="card-body">
<!-- <h2 class="card-title">You needed more info?</h2> -->
<p class="text-xs font-normal">{@html explanation}</p>
</div>
</div>
</div>

View File

@@ -0,0 +1,87 @@
<script lang="ts">
// Labeled on/off toggle row: title (with optional Beta badge and explainer),
// a switch whose colour reflects the state (green=on, grey=off, yellow=loading),
// and an optional tooltip anchored to the switch.
import Beta from './Beta.svelte';
// FIX: local identifier was misspelled "Explaner"; renamed to match the file it imports.
import Explainer from './Explainer.svelte';
import Tooltip from './Tooltip.svelte';
export let id: any;
export let customClass: any = null;
// Current boolean state of the setting (the parent handles on:click to flip it).
export let setting: any;
export let title: any;
export let isBeta: any = false;
export let description: any = null;
export let isCenter = true;
export let disabled = false;
export let dataTooltip: any = null;
export let loading = false;
// Tooltip anchors to the toggle element by CSS id selector.
let triggeredBy = `#${id}`;
</script>
<div class="flex items-center py-4 pr-8">
<div class="flex w-96 flex-col">
<!-- svelte-ignore a11y-label-has-associated-control -->
<label>
{title}
{#if isBeta}
<Beta />
{/if}
{#if description && description !== ''}
<Explainer explanation={description} />
{/if}
</label>
</div>
</div>
<div class:text-center={isCenter} class={`flex justify-center ${customClass}`}>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
on:click
aria-pressed="false"
class="relative mx-20 inline-flex h-6 w-11 flex-shrink-0 cursor-pointer rounded-full border-2 border-transparent transition-colors duration-200 ease-in-out"
class:opacity-50={disabled || loading}
class:bg-green-600={!loading && setting}
class:bg-stone-700={!loading && !setting}
class:bg-yellow-500={loading}
{id}
>
<span class="sr-only">Use setting</span>
<span
class="pointer-events-none relative inline-block h-5 w-5 transform rounded-full bg-white shadow transition duration-200 ease-in-out"
class:translate-x-5={setting}
class:translate-x-0={!setting}
>
<span
class=" absolute inset-0 flex h-full w-full items-center justify-center transition-opacity duration-200 ease-in"
class:opacity-0={setting}
class:opacity-100={!setting}
class:animate-spin={loading}
aria-hidden="true"
>
<svg class="h-3 w-3 bg-white text-red-600" fill="none" viewBox="0 0 12 12">
<path
d="M4 8l2-2m0 0l2-2M6 6L4 4m2 2l2 2"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
/>
</svg>
</span>
<span
class="absolute inset-0 flex h-full w-full items-center justify-center transition-opacity duration-100 ease-out"
aria-hidden="true"
class:opacity-100={setting}
class:opacity-0={!setting}
class:animate-spin={loading}
>
<svg class="h-3 w-3 bg-white text-green-600" fill="currentColor" viewBox="0 0 12 12">
<path
d="M3.707 5.293a1 1 0 00-1.414 1.414l1.414-1.414zM5 8l-.707.707a1 1 0 001.414 0L5 8zm4.707-3.293a1 1 0 00-1.414-1.414l1.414 1.414zm-7.414 2l2 2 1.414-1.414-2-2-1.414 1.414z"
/>
</svg>
</span>
</span>
</div>
</div>
{#if dataTooltip}
<Tooltip {triggeredBy} placement="top">{dataTooltip}</Tooltip>
{/if}

View File

@@ -0,0 +1,64 @@
<script>
// Single toast notification; forwards click/pause/resume events to the parent
// (pause/resume let the container freeze the auto-dismiss timer on hover/focus).
import { createEventDispatcher } from 'svelte';
const dispatch = createEventDispatcher();
// Toast variant: 'info' | 'success' | 'error'.
export let type = 'info';
// Extra classes for the success variant.
// FIX: previously returned undefined for non-success types, so the class
// attribute rendered the literal string "undefined"; return '' instead.
function success() {
if (type === 'success') {
return 'bg-dark lg:bg-primary';
}
return '';
}
</script>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
on:click={() => dispatch('click')}
on:mouseover={() => dispatch('pause')}
on:focus={() => dispatch('pause')}
on:mouseout={() => dispatch('resume')}
on:blur={() => dispatch('resume')}
class={` flex flex-row justify-center alert shadow-lg text-white hover:scale-105 transition-all duration-100 cursor-pointer rounded ${success()}`}
class:alert-error={type === 'error'}
class:alert-info={type === 'info'}
>
{#if type === 'success'}
<svg
xmlns="http://www.w3.org/2000/svg"
class="stroke-current flex-shrink-0 h-6 w-6"
fill="none"
viewBox="0 0 24 24"
><path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"
/></svg
>
{:else if type === 'error'}
<svg
xmlns="http://www.w3.org/2000/svg"
class="stroke-current flex-shrink-0 h-6 w-6"
fill="none"
viewBox="0 0 24 24"
><path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z"
/></svg
>
{:else if type === 'info'}
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
class="stroke-current flex-shrink-0 w-6 h-6"
><path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"
/></svg
>
{/if}
<slot />
</div>

View File

@@ -0,0 +1,25 @@
<script lang="ts">
// Renders the global toast stack from the $toasts store; each Toast forwards
// click (dismiss) and hover/focus (pause/resume the auto-dismiss timer) events.
import Toast from './Toast.svelte';
import { dismissToast, pauseToast, resumeToast, toasts } from '$lib/store';
</script>
<!-- NOTE(review): toast.message is rendered with {@html} — only trusted markup
     should ever be pushed into the toasts store. -->
{#if $toasts.length > 0}
<section>
<article class="toast toast-top toast-center rounded-none w-2/3 lg:w-[20rem]" role="alert">
{#each $toasts as toast (toast.id)}
<Toast
type={toast.type}
on:resume={() => resumeToast(toast.id)}
on:pause={() => pauseToast(toast.id)}
on:click={() => dismissToast(toast.id)}>{@html toast.message}</Toast
>
{/each}
</article>
</section>
{/if}
<style lang="postcss">
section {
@apply fixed top-0 left-0 right-0 w-full flex flex-col mt-4 justify-center z-[1000];
}
</style>

View File

@@ -0,0 +1,10 @@
<script lang="ts">
// Thin wrapper around flowbite-svelte's Tooltip with the project's styling.
import { Tooltip } from 'flowbite-svelte';
export let placement = 'bottom';
// Background class mixed into the tooltip's style.
export let color = 'bg-coollabs';
// CSS selector of the element that triggers the tooltip.
export let triggeredBy = '#tooltip-default';
</script>
<Tooltip {triggeredBy} {placement} arrow={false} defaultClass={color + ' font-thin text-xs text-left border-none p-2'} style="custom"
><slot /></Tooltip
>

View File

@@ -0,0 +1,206 @@
<script lang="ts">
import { dev } from '$app/environment';
import {
addToast,
appSession,
features,
updateLoading,
isUpdateAvailable,
latestVersion
} from '$lib/store';
import { asyncSleep, errorNotification } from '$lib/common';
import { onMount } from 'svelte';
import Tooltip from './Tooltip.svelte';
let updateStatus: any = {
found: false,
loading: false,
success: null
};
async function update() {
updateStatus.loading = true;
try {
if (dev) {
localStorage.setItem('lastVersion', $appSession.version);
await asyncSleep(1000);
updateStatus.loading = false;
return window.location.reload();
} else {
localStorage.setItem('lastVersion', $appSession.version);
// await post(`/update`, { type: 'update', latestVersion: $latestVersion });
addToast({
message: 'Update completed.<br><br>Waiting for the new version to start...',
type: 'success'
});
let reachable = false;
let tries = 0;
do {
await asyncSleep(4000);
try {
// await get(`/undead`);
reachable = true;
} catch (error) {
reachable = false;
}
if (reachable) break;
tries++;
} while (!reachable || tries < 120);
addToast({
message: 'New version reachable. Reloading...',
type: 'success'
});
updateStatus.loading = false;
updateStatus.success = true;
await asyncSleep(3000);
return window.location.reload();
}
} catch (error) {
updateStatus.success = false;
updateStatus.loading = false;
return errorNotification(error);
}
}
// On mount, check for a newer platform version. Only runs for a logged-in
// user on the root team ('0'); `$updateLoading` acts as a re-entrancy guard.
onMount(async () => {
	if ($appSession.userId) {
		// A feature flag can force a specific "latest" version for testing.
		const overrideVersion = $features.latestVersion;
		if ($appSession.teamId === '0') {
			if ($updateLoading === true) return;
			try {
				$updateLoading = true;
				// FIX: `data` was referenced below but its declaration was commented
				// out together with the API call, causing a ReferenceError at runtime.
				let data: any = null;
				// data = await get(`/update`);
				if (overrideVersion || data?.isUpdateAvailable) {
					$latestVersion = overrideVersion || data.latestVersion;
					if (overrideVersion) {
						$isUpdateAvailable = true;
					} else {
						$isUpdateAvailable = data.isUpdateAvailable;
					}
				}
			} catch (error) {
				return errorNotification(error);
			} finally {
				$updateLoading = false;
			}
		}
	}
});
</script>
<!-- Update notifier: rendered only for the root team ('0') and only when a
     newer version is available. The button shows one of four states:
     loading spinner / idle "update available" / success / failure. -->
<div class="py-0 lg:py-2">
{#if $appSession.teamId === '0'}
{#if $isUpdateAvailable}
<button
id="update"
disabled={updateStatus.success === false}
on:click={update}
class="icons bg-coollabs-gradient text-white duration-75 hover:scale-105 w-full"
>
{#if updateStatus.loading}
<!-- Update in progress: beating-heart spinner -->
<svg
xmlns="http://www.w3.org/2000/svg"
class="lds-heart h-8 w-8 mx-auto"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
fill="none"
stroke-linecap="round"
stroke-linejoin="round"
>
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
<path
d="M19.5 13.572l-7.5 7.428l-7.5 -7.428m0 0a5 5 0 1 1 7.5 -6.566a5 5 0 1 1 7.5 6.572"
/>
</svg>
{:else if updateStatus.success === null}
<!-- Idle: update icon; text label only shown on small screens -->
<div class="flex items-center justify-center space-x-2">
<svg
xmlns="http://www.w3.org/2000/svg"
class="h-8 w-8"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
fill="none"
stroke-linecap="round"
stroke-linejoin="round"
>
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
<circle cx="12" cy="12" r="9" />
<line x1="12" y1="8" x2="8" y2="12" />
<line x1="12" y1="8" x2="12" y2="16" />
<line x1="16" y1="12" x2="12" y2="8" />
</svg>
<span class="flex lg:hidden">Update available</span>
</div>
{:else if updateStatus.success}
<!-- Success: party-popper illustration -->
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-8 w-8"
><path
fill="#DD2E44"
d="M11.626 7.488c-.112.112-.197.247-.268.395l-.008-.008L.134 33.141l.011.011c-.208.403.14 1.223.853 1.937.713.713 1.533 1.061 1.936.853l.01.01L28.21 24.735l-.008-.009c.147-.07.282-.155.395-.269 1.562-1.562-.971-6.627-5.656-11.313-4.687-4.686-9.752-7.218-11.315-5.656z"
/><path
fill="#EA596E"
d="M13 12L.416 32.506l-.282.635.011.011c-.208.403.14 1.223.853 1.937.232.232.473.408.709.557L17 17l-4-5z"
/><path
fill="#A0041E"
d="M23.012 13.066c4.67 4.672 7.263 9.652 5.789 11.124-1.473 1.474-6.453-1.118-11.126-5.788-4.671-4.672-7.263-9.654-5.79-11.127 1.474-1.473 6.454 1.119 11.127 5.791z"
/><path
fill="#AA8DD8"
d="M18.59 13.609c-.199.161-.459.245-.734.215-.868-.094-1.598-.396-2.109-.873-.541-.505-.808-1.183-.735-1.862.128-1.192 1.324-2.286 3.363-2.066.793.085 1.147-.17 1.159-.292.014-.121-.277-.446-1.07-.532-.868-.094-1.598-.396-2.11-.873-.541-.505-.809-1.183-.735-1.862.13-1.192 1.325-2.286 3.362-2.065.578.062.883-.057 1.012-.134.103-.063.144-.123.148-.158.012-.121-.275-.446-1.07-.532-.549-.06-.947-.552-.886-1.102.059-.549.55-.946 1.101-.886 2.037.219 2.973 1.542 2.844 2.735-.13 1.194-1.325 2.286-3.364 2.067-.578-.063-.88.057-1.01.134-.103.062-.145.123-.149.157-.013.122.276.446 1.071.532 2.037.22 2.973 1.542 2.844 2.735-.129 1.192-1.324 2.286-3.362 2.065-.578-.062-.882.058-1.012.134-.104.064-.144.124-.148.158-.013.121.276.446 1.07.532.548.06.947.553.886 1.102-.028.274-.167.511-.366.671z"
/><path
fill="#77B255"
d="M30.661 22.857c1.973-.557 3.334.323 3.658 1.478.324 1.154-.378 2.615-2.35 3.17-.77.216-1.001.584-.97.701.034.118.425.312 1.193.095 1.972-.555 3.333.325 3.657 1.479.326 1.155-.378 2.614-2.351 3.17-.769.216-1.001.585-.967.702.033.117.423.311 1.192.095.53-.149 1.084.16 1.233.691.148.532-.161 1.084-.693 1.234-1.971.555-3.333-.323-3.659-1.479-.324-1.154.379-2.613 2.353-3.169.77-.217 1.001-.584.967-.702-.032-.117-.422-.312-1.19-.096-1.974.556-3.334-.322-3.659-1.479-.325-1.154.378-2.613 2.351-3.17.768-.215.999-.585.967-.701-.034-.118-.423-.312-1.192-.096-.532.15-1.083-.16-1.233-.691-.149-.53.161-1.082.693-1.232z"
/><path
fill="#AA8DD8"
d="M23.001 20.16c-.294 0-.584-.129-.782-.375-.345-.432-.274-1.061.156-1.406.218-.175 5.418-4.259 12.767-3.208.547.078.927.584.849 1.131-.078.546-.58.93-1.132.848-6.493-.922-11.187 2.754-11.233 2.791-.186.148-.406.219-.625.219z"
/><path
fill="#77B255"
d="M5.754 16c-.095 0-.192-.014-.288-.042-.529-.159-.829-.716-.67-1.245 1.133-3.773 2.16-9.794.898-11.364-.141-.178-.354-.353-.842-.316-.938.072-.849 2.051-.848 2.071.042.551-.372 1.031-.922 1.072-.559.034-1.031-.372-1.072-.923-.103-1.379.326-4.035 2.692-4.214 1.056-.08 1.933.287 2.552 1.057 2.371 2.951-.036 11.506-.542 13.192-.13.433-.528.712-.958.712z"
/><circle fill="#5C913B" cx="25.5" cy="9.5" r="1.5" /><circle
fill="#9266CC"
cx="2"
cy="18"
r="2"
/><circle fill="#5C913B" cx="32.5" cy="19.5" r="1.5" /><circle
fill="#5C913B"
cx="23.5"
cy="31.5"
r="1.5"
/><circle fill="#FFCC4D" cx="28" cy="4" r="2" /><circle
fill="#FFCC4D"
cx="32.5"
cy="8.5"
r="1.5"
/><circle fill="#FFCC4D" cx="29.5" cy="12.5" r="1.5" /><circle
fill="#FFCC4D"
cx="7.5"
cy="23.5"
r="1.5"
/></svg
>
{:else}
<!-- Failure: sad-face illustration; the button is disabled in this state -->
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-9 w-8"
><path
fill="#FFCC4D"
d="M36 18c0 9.941-8.059 18-18 18S0 27.941 0 18 8.059 0 18 0s18 8.059 18 18"
/><path
fill="#664500"
d="M22 27c0 2.763-1.791 3-4 3-2.21 0-4-.237-4-3 0-2.761 1.79-6 4-6 2.209 0 4 3.239 4 6zm8-12c-.124 0-.25-.023-.371-.072-5.229-2.091-7.372-5.241-7.461-5.374-.307-.46-.183-1.081.277-1.387.459-.306 1.077-.184 1.385.274.019.027 1.93 2.785 6.541 4.629.513.206.763.787.558 1.3-.157.392-.533.63-.929.63zM6 15c-.397 0-.772-.238-.929-.629-.205-.513.044-1.095.557-1.3 4.612-1.844 6.523-4.602 6.542-4.629.308-.456.929-.577 1.387-.27.457.308.581.925.275 1.383-.089.133-2.232 3.283-7.46 5.374C6.25 14.977 6.124 15 6 15z"
/><path fill="#5DADEC" d="M24 16h4v19l-4-.046V16zM8 35l4-.046V16H8v19z" /><path
fill="#664500"
d="M14.999 18c-.15 0-.303-.034-.446-.105-3.512-1.756-7.07-.018-7.105 0-.495.249-1.095.046-1.342-.447-.247-.494-.047-1.095.447-1.342.182-.09 4.498-2.197 8.895 0 .494.247.694.848.447 1.342-.176.35-.529.552-.896.552zm14 0c-.15 0-.303-.034-.446-.105-3.513-1.756-7.07-.018-7.105 0-.494.248-1.094.047-1.342-.447-.247-.494-.047-1.095.447-1.342.182-.09 4.501-2.196 8.895 0 .494.247.694.848.447 1.342-.176.35-.529.552-.896.552z"
/><ellipse fill="#5DADEC" cx="18" cy="34" rx="18" ry="2" /><ellipse
fill="#E75A70"
cx="18"
cy="27"
rx="3"
ry="2"
/></svg
>
{/if}
</button>
<!-- Hover tooltip anchored to the update button -->
<Tooltip triggeredBy="#update" placement="right" color="bg-coolgray-200 text-white"
>New Version Available!</Tooltip
>
{/if}
{/if}
</div>

Some files were not shown because too many files have changed in this diff Show More