Compare commits

..

326 Commits

Author SHA1 Message Date
Andras Bacsai
1d93658e56 Merge pull request #680 from coollabsio/next
v3.10.16
2022-10-12 22:05:39 +02:00
Andras Bacsai
2b7865e6ea update packages 2022-10-12 19:57:32 +00:00
Andras Bacsai
0cdba8c329 fix: single container logs and usage with compose 2022-10-12 19:53:21 +00:00
Andras Bacsai
11b317b788 ui: new resource label 2022-10-12 15:10:00 +02:00
Andras Bacsai
fb955e15f4 Merge pull request #656 from coollabsio/next
v3.10.15
2022-10-12 14:51:56 +02:00
Andras Bacsai
ae2d141f0d fix: pull does not work remotely on huge compose file 2022-10-12 14:27:13 +02:00
Andras Bacsai
68c983923e fix: dockerfile 2022-10-12 14:08:00 +02:00
Andras Bacsai
bf252f7f20 fix: appwrite v1 missing containers 2022-10-12 13:50:28 +02:00
Andras Bacsai
324038486f fixes 2022-10-12 12:02:47 +02:00
Andras Bacsai
bef5da49cf remove text 2022-10-12 11:43:47 +02:00
Andras Bacsai
24e7f547fa feat: monitoring by container 2022-10-12 11:42:45 +02:00
Andras Bacsai
3ee3ab0ad1 fix: port required if fqdn is set 2022-10-12 11:27:13 +02:00
Andras Bacsai
f734154da8 fix: check compose domains in general 2022-10-12 11:17:02 +02:00
Andras Bacsai
7a053ce697 fix: gitlab auth and compose reload 2022-10-12 11:11:18 +02:00
Andras Bacsai
25f250310e fix: dev container 2022-10-12 10:18:28 +02:00
Andras Bacsai
4eca05bbba fix: gh release 2022-10-12 10:07:18 +02:00
Andras Bacsai
45b0f791bb ci: update staging release 2022-10-11 10:54:22 +02:00
Andras Bacsai
42415a81c1 fix: update docker binaries 2022-10-11 10:54:13 +02:00
Andras Bacsai
6882e83d1e fix: logs for not running containers 2022-10-10 15:28:46 +02:00
Andras Bacsai
d4b7318413 fix: smart search for new services 2022-10-10 15:28:36 +02:00
Andras Bacsai
a2b4d400af fix: add git sha to build args 2022-10-10 15:28:16 +02:00
Andras Bacsai
f07868d24e fix: do not show nope as ip address for dbs 2022-10-10 15:28:02 +02:00
Andras Bacsai
d46ee049f4 Merge pull request #668 from bstst/bugfix/fix-deno-run-options
Fix deno options string
2022-10-10 15:26:00 +02:00
Andras Bacsai
c62eda5627 Merge pull request #666 from cmer/cmer-inject-git-sha
Inject GIT SHA into build
2022-10-10 15:17:18 +02:00
Martin Saulis
7683164ed2 fix deno options string 2022-10-07 14:59:16 +03:00
Carl Mercier
7090c16575 Update common.ts 2022-10-06 15:42:57 -04:00
Carl Mercier
9e634fed13 Inject GIT SHA into build process
As discussed at https://github.com/docker/hub-feedback/issues/600
2022-10-06 15:38:15 -04:00
Andras Bacsai
9bb125cebd feat: docker compose 2022-10-06 15:51:08 +02:00
Andras Bacsai
0c4850b91d fixes 2022-10-06 14:24:28 +02:00
Andras Bacsai
0eb7688c4d fixes 2022-10-06 14:15:05 +02:00
Andras Bacsai
f47cdb68d9 fixes 2022-10-06 12:01:24 +02:00
Andras Bacsai
d3c3cded37 debug: one less worker thread 2022-10-06 11:44:36 +02:00
Andras Bacsai
e91ea4ecbe feat: docker compose 2022-10-06 11:37:47 +02:00
Andras Bacsai
680b20d199 update dev container flow 2022-10-06 11:37:42 +02:00
Andras Bacsai
ec97e04fd4 revert last debug 2022-10-06 11:37:28 +02:00
Andras Bacsai
b0b2657fe0 debug: remove worker jobs 2022-10-06 10:26:40 +02:00
Andras Bacsai
d27426fd8f feat: docker compose support 2022-10-06 10:25:41 +02:00
Andras Bacsai
d8206c0e3e wip: docker compose 2022-10-05 15:34:52 +02:00
Andras Bacsai
3f1841a188 init: docker-compose support 2022-10-05 10:27:12 +00:00
Andras Bacsai
cb478e0dc8 add contribution guide on container based development flow 2022-10-05 09:13:51 +00:00
Andras Bacsai
02c42a7e3a fix: pure docker based development 2022-10-05 09:01:17 +00:00
Andras Bacsai
ef40f7349e Merge pull request #653 from coollabsio/next
v3.10.14
2022-10-05 09:24:39 +02:00
Andras Bacsai
86eebb35cb fix: do not use npx 2022-10-05 08:58:14 +02:00
Andras Bacsai
a901388887 revert 2022-10-04 23:06:46 +02:00
Andras Bacsai
6cd1c5de38 Dockerfile update 2022-10-04 22:22:29 +02:00
Andras Bacsai
7489f172a1 test prestart 2022-10-04 22:14:16 +02:00
Andras Bacsai
702798c275 revert things 2022-10-04 22:00:50 +02:00
Andras Bacsai
430d51866c test: remove prisma 2022-10-04 21:46:23 +02:00
Andras Bacsai
9d08421f01 dev intervals 2022-10-04 21:46:01 +02:00
Andras Bacsai
f4051874b2 Merge pull request #654 from vvvctr/patch-1
Proper capitalization for WordPress service type.
2022-10-04 21:14:52 +02:00
vvvctr
bbe0690056 Proper capitalization for WordPress service type. 2022-10-04 16:27:56 +02:00
Andras Bacsai
772c0d1e41 fix: nope if you are not logged in 2022-10-04 15:14:52 +02:00
Andras Bacsai
8eb9ca0260 fix: add buildkit features 2022-10-04 15:06:09 +02:00
Andras Bacsai
bd27afe0da fix: verify and configure remote docker engines 2022-10-04 15:01:47 +02:00
Andras Bacsai
a3af21275a fix: meilisearch data dir 2022-10-04 14:05:11 +02:00
Andras Bacsai
61eb155d13 chore: version++ 2022-10-04 14:05:01 +02:00
Andras Bacsai
7932c1c4a9 Merge pull request #648 from coollabsio/next
v3.10.13
2022-10-03 12:56:59 +02:00
Andras Bacsai
f776fb83e7 ui: settings icon 2022-10-03 11:45:24 +02:00
Andras Bacsai
a97521aba2 webhook: send 200 for ping and installation wh 2022-10-03 11:42:07 +02:00
Andras Bacsai
d1c0fe503e fix: remove unnecessary things 2022-10-03 11:32:15 +02:00
Andras Bacsai
ed02c1ae36 ui: iam & settings update 2022-10-03 11:31:50 +02:00
Andras Bacsai
9a67cf7355 fix: fork pr previews 2022-10-03 09:48:47 +02:00
Andras Bacsai
755eeda364 remove inspector 2022-10-03 09:25:31 +02:00
Andras Bacsai
136dee7747 ui: fix indicato 2022-10-03 09:20:57 +02:00
Andras Bacsai
e4e8428855 minify api 2022-10-02 11:08:04 +00:00
Andras Bacsai
de8dc021f9 fix: pr branches 2022-10-02 09:37:08 +00:00
Andras Bacsai
991587f252 fix: typo 2022-10-02 09:24:43 +00:00
Andras Bacsai
8dbcf257c4 fix: handle forked repositories 2022-10-02 09:16:51 +00:00
Andras Bacsai
0b067364a9 fix: default 0 pending invitations 2022-10-02 08:55:36 +00:00
Andras Bacsai
5367bd6134 show webhook details 2022-10-02 08:48:56 +00:00
Andras Bacsai
92228c4379 schema migration 2022-10-02 08:43:45 +00:00
Andras Bacsai
fb2c7896b3 update packages 2022-10-02 08:43:36 +00:00
Andras Bacsai
23265d9091 revert last changes 2022-10-02 10:38:08 +02:00
Andras Bacsai
2c9bb0e767 disable stuff 2022-10-01 13:58:50 +00:00
Andras Bacsai
f9e8400d83 temporary disable schedulers 2022-10-01 13:46:52 +00:00
Andras Bacsai
927a13cd76 temporary enable inspector 2022-10-01 13:03:55 +00:00
Andras Bacsai
51b3293e69 ui: inprogress version of iam 2022-09-29 15:46:52 +02:00
Andras Bacsai
3f76cadea9 fix: cleanup stucked tcp proxies 2022-09-29 14:44:20 +02:00
Andras Bacsai
6dbf53b558 chore: version++ 2022-09-29 14:32:55 +02:00
Andras Bacsai
22e937c798 fix: do not start tcp proxy without main container 2022-09-29 14:32:35 +02:00
Andras Bacsai
ac5cc8b299 Merge pull request #643 from coollabsio/next
v3.10.12
2022-09-29 14:09:37 +02:00
Andras Bacsai
c588ab723b fix: show logs better 2022-09-29 13:57:52 +02:00
Andras Bacsai
4b2dfc051d typo 2022-09-29 13:47:15 +02:00
Andras Bacsai
5238c83f3f fix: initial deploy status 2022-09-29 13:23:38 +02:00
Andras Bacsai
90bb580e50 ui: fixes 2022-09-29 13:23:29 +02:00
Andras Bacsai
f40e142704 ui: fix 2022-09-29 13:15:19 +02:00
Andras Bacsai
a67618675d fix: default buildImage and baseBuildImage 2022-09-29 13:15:16 +02:00
Andras Bacsai
4fe436e4d1 fix: dashboard statuses 2022-09-29 13:02:10 +02:00
Andras Bacsai
683b8c966f feat: cleanup unconfigured services and databases 2022-09-28 15:41:20 +02:00
Andras Bacsai
28377a156d feat: cleanup unconfigured applications 2022-09-28 11:45:02 +02:00
Andras Bacsai
3dcc4faabb Merge pull request #642 from coollabsio/next
v3.10.11
2022-09-28 11:18:01 +02:00
Andras Bacsai
60a033f93a ui: fix 2022-09-28 11:16:35 +02:00
Andras Bacsai
436bd73786 fix: baseDirectory 2022-09-28 11:14:23 +02:00
Andras Bacsai
5c69ff3339 fix: do not get status of more than 10 resources defined by category 2022-09-28 10:59:58 +02:00
Andras Bacsai
2105b1e7c4 ux: hasura console notification 2022-09-28 10:55:08 +02:00
Andras Bacsai
523004e5b2 chore: version++ 2022-09-28 10:54:57 +02:00
Andras Bacsai
5e02c386ec Merge pull request #641 from coollabsio/next
v3.10.10
2022-09-28 10:49:19 +02:00
Andras Bacsai
b4501fe52d ui: beta flag 2022-09-28 10:41:32 +02:00
Andras Bacsai
3c29eaa1b1 ui: small fix 2022-09-28 10:35:47 +02:00
Andras Bacsai
ee67e163b1 feat: system-wide github apps 2022-09-28 10:34:27 +02:00
Andras Bacsai
9662bc29fb ui: fix gitlab importer view 2022-09-28 09:56:27 +02:00
Andras Bacsai
96f2660b98 ui: loading button 2022-09-28 09:47:05 +02:00
Andras Bacsai
20f594c66c chore: version++ 2022-09-28 09:30:57 +02:00
Andras Bacsai
2b8d59dca3 Merge pull request #637 from coollabsio/next
v3.10.9
2022-09-28 09:29:31 +02:00
Andras Bacsai
d44047d109 ui: dev logs 2022-09-28 09:19:51 +02:00
Andras Bacsai
57c4d33bd3 ui: main resource search 2022-09-28 09:07:59 +02:00
Andras Bacsai
7a5377efe0 ui: resource button fix 2022-09-28 09:07:46 +02:00
Andras Bacsai
91e7cffccc fix: only log things to console in dev mode 2022-09-28 08:39:59 +02:00
Andras Bacsai
df31e47313 fix: disable development low disk space 2022-09-28 08:39:33 +02:00
Andras Bacsai
cb9586270c fix: able to delete apps in unconfigured state 2022-09-27 09:27:28 +00:00
Andras Bacsai
21dfa5227c fix: logs in docker bp 2022-09-26 20:37:58 +00:00
Andras Bacsai
9d15d2be77 chore: version++ 2022-09-26 20:22:08 +00:00
Andras Bacsai
929c02d31f ui: fix basedirectory meaning 2022-09-26 20:21:41 +00:00
Andras Bacsai
846185dd42 Merge pull request #635 from coollabsio/next
v3.10.8
2022-09-26 21:36:54 +02:00
Andras Bacsai
7bc2299a8e rename grafana dashboard to grafana 2022-09-26 19:24:13 +00:00
Andras Bacsai
d40e131bd8 fix: appwrite function network is not the default 2022-09-26 19:20:41 +00:00
Andras Bacsai
552c7297bf ui: service fixes 2022-09-26 19:00:36 +00:00
Andras Bacsai
3f5fd23955 ui: fix button 2022-09-26 18:57:24 +00:00
Andras Bacsai
8b8566251e chore: version++ 2022-09-26 18:49:05 +00:00
Andras Bacsai
6db47def8e fix: service logs 2022-09-26 18:48:22 +00:00
Andras Bacsai
1d0edc7b25 Merge pull request #634 from coollabsio/next
v3.10.7
2022-09-26 15:43:27 +02:00
Andras Bacsai
f9a417638a fix: seed 2022-09-26 13:42:19 +00:00
Andras Bacsai
984fe01551 Merge pull request #632 from coollabsio/next
v3.10.6
2022-09-26 13:43:49 +02:00
Andras Bacsai
d0cb350687 fix: error notification 2022-09-26 13:37:06 +02:00
Andras Bacsai
5f51011ce1 fix: empty preview value 2022-09-26 13:36:54 +02:00
Andras Bacsai
9ca125ac55 fix: error notification 2022-09-26 13:36:47 +02:00
Andras Bacsai
360f4f8c27 fix: seed new preview secret types 2022-09-26 13:36:15 +02:00
Andras Bacsai
6501f71bd6 chore: version++ 2022-09-26 13:24:02 +02:00
Andras Bacsai
bf6b799dba Merge pull request #625 from coollabsio/next
v3.10.5
2022-09-26 11:23:01 +02:00
Andras Bacsai
5f57279283 fix: multiplex ssh and ssl copy 2022-09-26 11:15:14 +02:00
Andras Bacsai
5ed3565520 ui: Beta features 2022-09-26 10:31:52 +02:00
Andras Bacsai
513fa90b8a ui: fixes 2022-09-26 10:27:51 +02:00
Andras Bacsai
a4d9b9689b fix: laravel php chooser 2022-09-26 10:27:42 +02:00
Andras Bacsai
1c05c0dcbb ui: fix 2022-09-26 10:21:01 +02:00
Andras Bacsai
a1b49a3a6b fix: base directory & docker bp 2022-09-26 10:20:53 +02:00
Andras Bacsai
6f57298cbb fix: scp without host verification & cert copy 2022-09-26 09:52:04 +02:00
Andras Bacsai
d8ce673088 fix: debug log for bp 2022-09-25 08:00:53 +00:00
Andras Bacsai
4cd7af7a74 fix: stream logs for heroku bp 2022-09-25 07:58:52 +00:00
Andras Bacsai
49c61b5992 fix: allow basedirectory for heroku 2022-09-25 07:48:03 +00:00
Andras Bacsai
e44d0550d2 fix: basedirectory should be empty if null 2022-09-25 07:47:54 +00:00
Andras Bacsai
17f82109b6 fix: consider base directory in heroku bp 2022-09-25 07:47:37 +00:00
Andras Bacsai
2d8888ae9b ui: fixes 2022-09-23 20:01:30 +00:00
Andras Bacsai
4abe9c6fb2 feat: ssl certificate sets custom ssl for applications 2022-09-23 15:21:19 +02:00
Andras Bacsai
f9d94fa660 ui: fixes 2022-09-23 14:20:37 +02:00
Andras Bacsai
eaa13f4990 remove self-signed certs 2022-09-23 14:10:17 +02:00
Andras Bacsai
01fd5901fe ui: fixes
fix: secret saving process
2022-09-23 14:09:26 +02:00
Andras Bacsai
3d6adeffc4 ui: more UI improvements 2022-09-22 15:48:16 +02:00
Andras Bacsai
9066952759 ui: redesign applications & settings
fix: follow logs
2022-09-22 12:30:28 +02:00
Andras Bacsai
6dd7f6274a fix: error during saving logs 2022-09-22 12:30:04 +02:00
Andras Bacsai
7a8fe6d152 ui: settings view 2022-09-22 09:47:33 +02:00
Andras Bacsai
be507be3a9 feat: refresh resource status on dashboard 2022-09-22 09:47:25 +02:00
Andras Bacsai
657b97f190 ui: fix destination view 2022-09-22 09:09:31 +02:00
Andras Bacsai
9d7745cd9b fix: settings db requests 2022-09-22 09:04:44 +02:00
Andras Bacsai
3668f83693 fix: not found redirect 2022-09-22 09:04:32 +02:00
Andras Bacsai
a2d5d99c1f fix: able to search with id 2022-09-22 09:03:34 +02:00
Andras Bacsai
f379ef6a3b feat: ssl cert on traefik config 2022-09-22 09:03:19 +02:00
Andras Bacsai
510a748749 fix: multiplex ssh connections 2022-09-22 09:02:53 +02:00
Andras Bacsai
550150d685 fix: db migration 2022-09-22 09:02:39 +02:00
Andras Bacsai
011ea9659e fix: ssl certificate distribution 2022-09-22 09:02:27 +02:00
Andras Bacsai
6eca7d948e add migration 2022-09-21 15:48:49 +02:00
Andras Bacsai
90e639f119 feat: custom certificate 2022-09-21 15:48:32 +02:00
Andras Bacsai
86ac6461d1 fix: dropdown 2022-09-20 13:33:35 +00:00
Andras Bacsai
18a95bf9ab ui: improvements 2022-09-20 15:06:33 +02:00
Andras Bacsai
7949bbe66d Merge branch 'temp' into next 2022-09-20 14:58:25 +02:00
Andras Bacsai
4b603c452a Merge pull request #602 from c0ldfront/trilium-notes-service
add trilium-notes-service
2022-09-20 14:54:53 +02:00
Andras Bacsai
837f0634b6 Merge pull request #606 from c0ldfront/grafana-service
add grafana-dashboard-service
2022-09-20 14:51:06 +02:00
Andras Bacsai
78076f7854 Merge branch 'next' into grafana-service 2022-09-20 14:50:37 +02:00
Andras Bacsai
719350cee1 Merge pull request #614 from c0ldfront/minio-login-issue
fix: MinIO invalid login
2022-09-20 14:49:41 +02:00
Andras Bacsai
4f6be3e6f5 revert: show usage everytime 2022-09-20 14:37:29 +02:00
Andras Bacsai
8e61e9fecb feat: Add migration button to appwrite 2022-09-20 14:36:06 +02:00
Andras Bacsai
2083285d78 version correction 2022-09-20 14:34:54 +02:00
Andras Bacsai
034e86e2cb ui: small logs on mobile 2022-09-20 13:07:49 +02:00
Andras Bacsai
f4a2d5c652 ui: dropdown as infobox 2022-09-19 14:01:48 +00:00
Andras Bacsai
534ccd6bf6 ui: fix git icon 2022-09-19 13:52:41 +00:00
Andras Bacsai
c17064f853 ui: fixes 2022-09-19 14:05:25 +02:00
Andras Bacsai
1e1566082f fix: tooltip 2022-09-19 12:14:14 +02:00
Andras Bacsai
449548654d Merge branch 'ui' into next 2022-09-19 12:06:00 +02:00
Andras Bacsai
6fc99524f0 ui: responsive! 2022-09-19 12:05:47 +02:00
Andras Bacsai
051629fad3 fix: ui 2022-09-19 08:57:55 +02:00
Andras Bacsai
f957008c1c Merge branch 'main' into ui 2022-09-19 08:57:48 +02:00
Andras Bacsai
98e1deec88 Merge pull request #612 from kaname-png/some-tweaks
feat(routes): ui for mobile and fixes
2022-09-19 08:54:23 +02:00
Andras Bacsai
99127652af fix: undead endpoint does not require JWT 2022-09-19 08:53:36 +02:00
Andras Bacsai
e9b9e9e82c fix: Appwrite default version 1.0 2022-09-19 08:53:22 +02:00
Andras Bacsai
2ed5c3746e chore: version++ 2022-09-19 08:53:11 +02:00
Andras Bacsai
8902056fdb Merge pull request #622 from coollabsio/next
v3.10.4
2022-09-19 08:31:54 +02:00
Andras Bacsai
defa6ff6e8 fix: update PR building status 2022-09-16 15:49:54 +02:00
Andras Bacsai
eed44e81be fix: await #2 2022-09-16 15:20:45 +02:00
Andras Bacsai
1951aec5ec fix: await 2022-09-16 15:20:17 +02:00
Andras Bacsai
9c4e0b4107 fix: get building status 2022-09-16 15:20:02 +02:00
Andras Bacsai
c8deac660d fix: appwrite?! 2022-09-16 14:49:01 +02:00
Andras Bacsai
4cc5ec9bd0 remove line 2022-09-16 14:38:33 +02:00
Andras Bacsai
c41bef2e81 Traefik add / pathprefix 2022-09-16 14:30:32 +02:00
Andras Bacsai
5b735cf960 fix: versions of appwrite 2022-09-16 14:30:25 +02:00
Andras Bacsai
604e960aa9 ui: fix buttons 2022-09-16 14:07:43 +02:00
Andras Bacsai
6c465aa1f2 feat: show remote servers 2022-09-16 14:07:35 +02:00
Andras Bacsai
c266832fdc ui: fix cleanup button 2022-09-16 14:07:28 +02:00
Andras Bacsai
906d8d0413 update contribution guide 2022-09-16 09:26:48 +02:00
Andras Bacsai
cb05fd4a3c fix: build logs 2022-09-16 09:06:45 +02:00
Kaname
2eda24799b Merge branch 'ui' into some-tweaks 2022-09-15 10:52:31 -06:00
Andras Bacsai
41e221f0cb fix: load more 2022-09-15 14:45:57 +02:00
Andras Bacsai
f75af035bb fix: logging 2022-09-15 14:27:55 +02:00
Andras Bacsai
e9e6449edf fix: canceling build 2022-09-15 14:05:33 +02:00
Andras Bacsai
f09d76da35 fix: changing build log without reload
fix: elapsed time of a build
2022-09-15 13:57:20 +02:00
Andras Bacsai
40dfe0919b hm 2022-09-15 11:15:07 +02:00
Andras Bacsai
85990dd074 fix: fluentbit and logs 2022-09-15 10:56:19 +02:00
Andras Bacsai
38acc16e1c fix: coolify update 2022-09-15 10:10:38 +02:00
Andras Bacsai
b7cc4c1e92 fix: fluentbit configuration 2022-09-15 10:08:17 +02:00
Andras Bacsai
1f232d96d8 differentiate between db logs and not 2022-09-15 10:03:38 +02:00
Andras Bacsai
83508f165d fix compose 2022-09-15 09:54:40 +02:00
Andras Bacsai
7cc58e7e84 Coolify fluent-bit image 2022-09-15 09:47:54 +02:00
Andras Bacsai
31d9740aac fix: fallback to db logs 2022-09-15 09:35:50 +02:00
Andras Bacsai
69891a64a0 feat: fluentbit 2022-09-15 09:34:39 +02:00
Andras Bacsai
0940309600 init for pgdb 2022-09-14 13:39:14 +02:00
Andras Bacsai
a762b1ed60 fix: updateMany build logs 2022-09-14 13:19:55 +02:00
Andras Bacsai
1b9d9d3a8b fix: dev url 2022-09-14 10:50:12 +02:00
Andras Bacsai
d9908b3d61 feat: previewApplications finalized 2022-09-13 15:50:20 +02:00
Andras Bacsai
c40b80436a fix: login 2022-09-13 10:14:31 +02:00
Andras Bacsai
8f1e352bcc ui: fix plausible 2022-09-13 10:14:25 +02:00
Andras Bacsai
18e769b5e5 fix: plausible analytics actions 2022-09-13 10:14:15 +02:00
Andras Bacsai
27af6459b3 feat: previewapplications init 2022-09-13 07:57:57 +00:00
Kaname
2c4bfab01a chore: whoops 2022-09-11 17:56:12 -06:00
Kaname
e689be552b Merge branch 'next' into some-tweaks 2022-09-11 17:54:04 -06:00
Andras Bacsai
ad80e7f48b Merge pull request #619 from coollabsio/next
v3.10.3
2022-09-11 17:01:22 +02:00
Andras Bacsai
d81b75b084 fix: umami init sql 2022-09-11 14:50:23 +00:00
Andras Bacsai
90f1431047 Merge pull request #617 from coollabsio/next
v3.10.2
2022-09-11 14:45:30 +02:00
Andras Bacsai
61ea7dabae fix: show error logs 2022-09-11 12:38:58 +00:00
Andras Bacsai
5d9f5f4a7d fix: gitlab importer for public repos 2022-09-11 12:38:50 +00:00
Andras Bacsai
f956f612d3 fixes 2022-09-11 12:20:01 +00:00
Andras Bacsai
3f5108268d Merge pull request #618 from coollabsio/draft/nostalgic-allen
Draft/nostalgic allen
2022-09-11 14:16:54 +02:00
Andras Bacsai
4c0dfc3f30 Merge branch 'next' into draft/nostalgic-allen 2022-09-11 14:16:26 +02:00
Andras Bacsai
1670fe9b1c Update supportedVersions.ts 2022-09-11 12:14:27 +00:00
Andras Bacsai
300b28c0f2 move reset queue button to build logs 2022-09-11 12:11:48 +00:00
Andras Bacsai
e7038961ef Merge branch 'next' into some-tweaks 2022-09-11 13:56:07 +02:00
Andras Bacsai
24e77a5211 Merge pull request #615 from ArticaDev/main
fix: changing umami image URL to get latest version
2022-09-11 13:54:11 +02:00
Andras Bacsai
9df039fbc2 Merge pull request #616 from coollabsio/draft/nostalgic-allen
feat: Add queue reset button
2022-09-11 13:53:15 +02:00
Andras Bacsai
143cd46a81 feat: Add queue reset button 2022-09-11 11:51:43 +00:00
LL
680e9871ed fix: changing umami image URL to get latest version 2022-09-10 23:32:36 -03:00
David Mydlarz
d5ece58f71 MINIO_SERVER_URL -> apiFqdn 2022-09-11 09:07:31 +09:00
Kaname
d7bbb5c4b7 chore: minor changes 2022-09-10 19:14:41 +00:00
Kaname
cf9c991c79 chore: minor changes 2022-09-10 19:03:09 +00:00
Kaname
0f0d96195d fix(routes): ui from secrets table 2022-09-10 19:00:43 +00:00
Kaname
3a562bb714 feat(routes): improve ui for apps, databases and services logs 2022-09-10 17:45:47 +00:00
Kaname
6381ba8478 fix(routes): header of settings page in databases 2022-09-10 17:27:48 +00:00
Kaname
9e3c14841a fix: ui with headers 2022-09-10 17:23:55 +00:00
Kaname
1917091338 Merge remote-tracking branch 'origin' into some-tweaks 2022-09-10 17:15:18 +00:00
Kaname
b1bb508554 Merge branch 'next' into some-tweaks 2022-09-10 17:14:44 +00:00
Andras Bacsai
0a68a48fc5 Merge pull request #611 from coollabsio/next
v3.10.1
2022-09-10 10:47:47 +02:00
Andras Bacsai
d3af6792d0 remove debug 2022-09-10 08:46:31 +00:00
Andras Bacsai
44dc3b743e add debug 2022-09-10 08:35:31 +00:00
Andras Bacsai
b469d2832d fix: delete resource use window location 2022-09-10 07:58:56 +00:00
Andras Bacsai
d844026c29 dev: arm should be on next all the time 2022-09-10 07:41:36 +00:00
Andras Bacsai
21b4990652 ui: fix follow button 2022-09-10 07:29:51 +00:00
Andras Bacsai
39e24bdc97 remove console logs 2022-09-10 07:17:20 +00:00
Andras Bacsai
bc66b98176 fix: build secrets for apps 2022-09-10 07:14:30 +00:00
Andras Bacsai
d6d3fb46cc fix: volumes for services 2022-09-10 07:14:17 +00:00
Kaname
4040b334f5 fix(routes): more ui tweaks 2022-09-10 01:54:59 +00:00
Kaname
d7e72519ef fix(routes): more ui tweaks 2022-09-10 01:30:07 +00:00
Kaname
c7752f0be9 fix(routes): more ui tweaks 2022-09-10 01:27:48 +00:00
Kaname
0ffe28a733 fix(routes): more ui tweaks 2022-09-10 01:23:17 +00:00
Kaname
56f24fe317 feat(styles): make header css component 2022-09-10 01:13:44 +00:00
Kaname
341cde2781 Merge branch 'next' into some-tweaks 2022-09-10 01:07:11 +00:00
Kaname
33bb8d434d feat(ui): improve header of pages 2022-09-10 00:16:49 +00:00
Kaname
9f813b7385 fix: github conflicts 2022-09-10 00:05:19 +00:00
Kaname
02a336a25d Merge remote-tracking branch 'origin' into some-tweaks 2022-09-09 23:57:03 +00:00
Andras Bacsai
88ed1446f4 fix: secrets for PR 2022-09-09 15:46:47 +02:00
Andras Bacsai
c69312f128 fix: remove unnecessary gitlab group name 2022-09-09 15:19:14 +02:00
Andras Bacsai
c5bcff0e10 fix: show restarting application & logs 2022-09-09 15:14:58 +02:00
Andras Bacsai
871d1e2440 ui: fix button 2022-09-09 15:14:19 +02:00
Andras Bacsai
1619afb938 chore: version++ 2022-09-09 14:49:13 +02:00
Andras Bacsai
25528913f1 fix: show restarting apps 2022-09-09 14:48:58 +02:00
David Mydlarz
7df532fa72 Grafana Dashboard service completed 2022-09-09 00:17:47 +09:00
Andras Bacsai
ef91441c76 Merge pull request #603 from coollabsio/next
v3.10.0
2022-09-08 15:41:46 +02:00
Andras Bacsai
aa6c56b63d do not show servers to non admins 2022-09-08 15:23:01 +02:00
Andras Bacsai
18e899d15e disable remote servers for now 2022-09-08 15:00:54 +02:00
Andras Bacsai
63fa8924ae fix: autoupdater 2022-09-08 14:46:08 +02:00
Andras Bacsai
0e13e3bd81 feat: new servers view 2022-09-08 14:42:04 +02:00
Andras Bacsai
372c0ed457 fix: glitchtip env to pyhton boolean 2022-09-08 12:04:08 +02:00
Andras Bacsai
071077200b ui: fix tooltip 2022-09-08 11:57:15 +02:00
Andras Bacsai
65579a2861 Merge branch 'next' of github.com:coollabsio/coolify into next 2022-09-08 11:47:09 +02:00
Andras Bacsai
bb7603ae2a Merge pull request #601 from c0ldfront/fixes
fix typo located in postCreateCommand in devcontainers.json
2022-09-08 11:45:54 +02:00
Andras Bacsai
cce67d274e fix: ui variables 2022-09-08 11:44:23 +02:00
Andras Bacsai
794329dcad fix: port checkers 2022-09-08 11:41:38 +02:00
Andras Bacsai
e36fda3ff1 fix: ispublic status on databases 2022-09-08 10:54:24 +02:00
Andras Bacsai
3832d33259 fix: save search input 2022-09-08 10:45:12 +02:00
David Mydlarz
1f40c2ccf8 add trilium-notes-service 2022-09-08 17:32:11 +09:00
Andras Bacsai
7350524456 ui: dashboard updates 2022-09-08 10:30:13 +02:00
Andras Bacsai
a1a973a873 fix: change to execa from utils 2022-09-08 09:32:50 +02:00
David Mydlarz
f2a915700c fix type located in postCreateCommand in devcontainers.json 2022-09-08 13:33:18 +09:00
Andras Bacsai
e184f99617 Merge branch 'main' into next 2022-09-07 20:32:27 +00:00
Andras Bacsai
ab07adb14f fix: Settings for service 2022-09-07 20:32:15 +00:00
Andras Bacsai
6535c68276 Merge branch 'main' into next 2022-09-07 20:15:26 +00:00
Andras Bacsai
dde2772e52 fix: dnsServer formatting 2022-09-07 20:14:49 +00:00
Kaname
4a8fd309c5 fix(routes): searchbar ui 2022-09-07 17:59:22 +00:00
Kaname
b416849d9c feat: re-apply ui improves 2022-09-07 17:14:29 +00:00
Kaname
bc321d8ced Merge branch 'next' into some-tweaks 2022-09-07 17:14:09 +00:00
Andras Bacsai
ac72c19d22 Merge branch 'main' into next 2022-09-07 15:40:37 +00:00
Andras Bacsai
67fc2fd3c0 fix: pr previews 2022-09-07 15:29:56 +00:00
Andras Bacsai
4acc59204c ui: dashboard updates and a lot more 2022-09-07 15:59:37 +02:00
Andras Bacsai
07cadb59e0 fix: edgedb 2022-09-07 11:40:58 +02:00
Andras Bacsai
6fa4741c81 feat: database secrets 2022-09-07 11:40:52 +02:00
Andras Bacsai
f4bac2382c fix: edgedb stuff 2022-09-07 11:02:13 +02:00
Andras Bacsai
67b72220c0 feat: add traefik acme json to coolify container 2022-09-07 11:01:04 +02:00
Andras Bacsai
feeb14ea47 fix: edgedb ui 2022-09-07 10:49:49 +02:00
Andras Bacsai
bdfb6f5f46 chore: version++ 2022-09-07 10:23:08 +02:00
Andras Bacsai
53491e9eaa Merge pull request #522 from ikezedev/edge-db
[New Database]: EdgeDB
2022-09-07 10:21:47 +02:00
Andras Bacsai
f720de65fa Update _DatabaseLinks.svelte 2022-09-07 10:21:12 +02:00
Andras Bacsai
3d70162a8d Merge branch 'next' into edge-db 2022-09-07 10:20:40 +02:00
Kaname
45919fc0cf fix(routes): duplicates classes in services page 2022-09-07 02:09:12 +00:00
Kaname
dd6f4c4844 fix(routes): ui from settings page 2022-09-07 02:03:48 +00:00
Kaname
bb47db033f fix(routes): more ui tweaks 2022-09-07 01:47:43 +00:00
Kaname
111ea78693 fix(routes): more ui tweaks 2022-09-07 01:47:04 +00:00
Kaname
c17253589a fix(routes): more ui tweaks 2022-09-07 01:45:05 +00:00
Kaname
7e6156f5dd fix(routes): more ui tweaks 2022-09-07 01:29:48 +00:00
Kaname
d5cfb63f52 fix(routes): ui from services page 2022-09-07 00:50:34 +00:00
Kaname
cab15055e7 fix(routes): ui from databases page 2022-09-07 00:22:56 +00:00
Kaname
9185910171 fix(routes): ui from databases page 2022-09-07 00:20:15 +00:00
Kaname
b4892e0caf fix(routes): ui from databases page 2022-09-07 00:16:57 +00:00
Kaname
83e0cafef9 chore: minor changes 2022-09-06 23:46:11 +00:00
Kaname
7cb75506c3 fix(routes): ui from destinations page 2022-09-06 23:43:11 +00:00
Kaname
ac6970ad40 fix(routes): improve design of git sources page 2022-09-06 19:06:27 +00:00
Kaname
5a95cc236c fix(routes): improve design of application page 2022-09-06 18:51:19 +00:00
Kaname
95c942f477 feat(layout): added drawer when user is in mobile 2022-09-06 17:37:26 +00:00
Ikechukwu Eze
7ed1ced521 insecure mode 2022-08-04 13:11:13 +02:00
Ikechukwu Eze
801b9c1483 Merge branch 'next' into edge-db 2022-07-31 19:54:30 +02:00
Ikechukwu Eze
3990bebca3 add to supported databases 2022-07-31 19:50:25 +02:00
Ikechukwu Eze
8a2de1001f first steps with ui 2022-07-31 19:50:03 +02:00
236 changed files with 13687 additions and 8277 deletions

View File

@@ -26,7 +26,7 @@
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [3000, 3001],
// Use 'postCreateCommand' to run commands after the container is created.
-"postCreateCommand": "cp apps/api/.env.example pps/api/.env && pnpm install && pnpm db:push && pnpm db:seed",
+"postCreateCommand": "cp apps/api/.env.example apps/api/.env && pnpm install && pnpm db:push && pnpm db:seed",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node",
"features": {

View File

@@ -5,7 +5,7 @@ on:
types: [released]
jobs:
-arm64-build:
+arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
@@ -31,7 +31,7 @@ jobs:
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
-amd64-build:
+amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -57,9 +57,35 @@ jobs:
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
+aarch64:
+runs-on: [self-hosted, arm64]
+steps:
+- name: Checkout
+uses: actions/checkout@v3
+- name: Set up QEMU
+uses: docker/setup-qemu-action@v1
+- name: Set up Docker Buildx
+uses: docker/setup-buildx-action@v1
+- name: Login to DockerHub
+uses: docker/login-action@v1
+with:
+username: ${{ secrets.DOCKERHUB_USERNAME }}
+password: ${{ secrets.DOCKERHUB_TOKEN }}
+- name: Get current package version
+uses: martinbeentjes/npm-get-version-action@v1.2.3
+id: package-version
+- name: Build and push
+uses: docker/build-push-action@v2
+with:
+context: .
+platforms: linux/aarch64
+push: true
+tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
+cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64
+cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max
merge-manifest:
runs-on: ubuntu-latest
-needs: [amd64-build, arm64-build]
+needs: [amd64, arm64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -77,7 +103,7 @@ jobs:
id: package-version
- name: Create & publish manifest
run: |
-docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
+docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
- uses: sarisia/actions-status-discord@v1
if: always()

View File

@@ -6,7 +6,7 @@ on:
- next
jobs:
-arm64-making-something-cool:
+arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
@@ -34,7 +34,7 @@ jobs:
tags: coollabsio/coolify:next-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
-amd64-making-something-cool:
+amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -59,12 +59,12 @@ jobs:
context: .
platforms: linux/amd64
push: true
-tags: coollabsio/coolify:next-amd64,coollabsio/coolify:next-test
+tags: coollabsio/coolify:next-amd64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
-merge-manifest-to-be-cool:
+merge-manifest:
runs-on: ubuntu-latest
-needs: [arm64-making-something-cool, amd64-making-something-cool]
+needs: [arm64, amd64]
steps:
- name: Checkout
uses: actions/checkout@v3

4
.gitignore vendored
View File

@@ -12,4 +12,6 @@ client
apps/api/db/*.db
local-serve
apps/api/db/migration.db-journal
apps/api/core*
+logs
+others/certificates

View File

@@ -1,256 +0,0 @@
# 👋 Welcome
First of all, thank you for considering contributing to my project! It means a lot 💜.
## 🙋 Want to help?
If you are new to contributing on GitHub, you can read the [first contribution](https://github.com/firstcontributions/first-contributions) guide and follow it.
Follow the [introduction](#introduction) to get started then start contributing!
This is a little list of what you can do to help the project:
- [🧑‍💻 Develop your own ideas](#developer-contribution)
- [🌐 Translate the project](#translation)
## 👋 Introduction
### Setup with Github codespaces
If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to start the dev environment. All the required dependencies and packages have been configured for you already.
### Setup with Gitpod
If you have a [Gitpod](https://gitpod.io) account, you can just create a workspace from this repository, run `pnpm install && pnpm db:push && pnpm db:seed` and then `pnpm dev`. All the required dependencies and packages have been configured for you already.
### Setup locally in your machine
> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. Consider using Gitpod or Github Codespaces.
#### Recommended Pull Request Guideline
- Fork the project
- Clone your fork repo to local
- Create a new branch
- Push to your fork repo
- Create a pull request: https://github.com/coollabsio/coolify/compare
- Write a proper description
- Open the pull request to review against `next` branch
---
# 🧑‍💻 Developer contribution
## Technical skills required
- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: [SvelteKit](https://kit.svelte.dev/) & [Fastify](https://www.fastify.io/)
- **Database ORM**: [Prisma.io](https://www.prisma.io/)
- **Docker Engine API**
---
## How to start after you set up your local fork?
### Prerequisites
1. Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
2. You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
3. You need to have [Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/) installed locally.
4. You need to have [GIT LFS Support](https://git-lfs.github.com/) installed locally.
Optional:
4. To test Heroku buildpacks, you need [pack](https://github.com/buildpacks/pack) binary installed locally.
### Steps for local setup
1. Copy `apps/api/.env.template` to `apps/api/.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
2. Install dependencies with `pnpm install`.
3. Create a local SQLite database with `pnpm db:push`.
This will apply all migrations at `db/dev.db`.
4. Seed the database with base entities with `pnpm db:seed`
5. You can start coding after starting `pnpm dev`.
---
## Database migrations
During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.
If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>`, where `nameOfMigration` is chosen by you. Make it meaningful. :)
---
## How to add new services
You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:
- Self-hostable (obviously)
- Open-source
- Maintained (I do not want to add software full of bugs)
## Backend
There are 5 steps to complete on the backend side.
1. Create Prisma / database schema for the new service.
2. Add supported versions of the service.
3. Update global functions.
4. Create API endpoints.
5. Define automatically generated variables.
> I will use [Umami](https://umami.is/) as an example service.
### Create Prisma / Database schema for the new service.
You only need to do this if you store passwords or any persistent configuration. Most services require it, but there are some exceptions, like NocoDB.
Update Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).
- Add new model with the new service name.
- Make a relationship with `Service` model.
- In the `Service` model, the name of the new field should be lowercase.
- If the service needs a database, define a `publicPort` field to be able to make its database public; example field name in the case of PostgreSQL: `postgresqlPublicPort`. It should be an optional field.
If you are finished with the Prisma schema, you should update the database schema with the `pnpm db:push` command.
> You must restart the running development environment to be able to use the new model
> If you use VSCode/TLS, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running environment.
### Add supported versions
Supported versions are hardcoded into Coolify (for now).
You need to update `supportedServiceTypesAndVersions` function at [apps/api/src/lib/services/supportedVersions.ts](apps/api/src/lib/services/supportedVersions.ts). Example JSON:
```js
{
    // Name used to identify the service internally
    name: 'umami',
    // Fancier name to show to the user
    fancyName: 'Umami',
    // Docker base image for the service
    baseImage: 'ghcr.io/mikecao/umami',
    // Optional: If there is any dependent image, you should list it here
    images: [],
    // Usable tags
    versions: ['postgresql-latest'],
    // Which tag is the recommended
    recommendedVersion: 'postgresql-latest',
    // Application's default port, Umami listens on 3000
    ports: {
        main: 3000
    }
}
```
### Add required functions/properties
1. Add the new service to the `includeServices` variable in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts), so it will be included in all places in the database queries where it is required.
```js
const include: any = {
    destinationDocker: true,
    persistentStorage: true,
    serviceSecret: true,
    minio: true,
    plausibleAnalytics: true,
    vscodeserver: true,
    wordpress: true,
    ghost: true,
    meiliSearch: true,
    umami: true // This line!
};
```
2. Update the database update query with the new service type in the `configureServiceType` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts). This function defines the automatically generated variables (passwords, users, etc.) and their encryption process (if applicable).
```js
[...]
else if (type === 'umami') {
    const postgresqlUser = cuid();
    const postgresqlPassword = encrypt(generatePassword());
    const postgresqlDatabase = 'umami';
    const hashSalt = encrypt(generatePassword(64));
    await prisma.service.update({
        where: { id },
        data: {
            type,
            umami: {
                create: {
                    postgresqlDatabase,
                    postgresqlPassword,
                    postgresqlUser,
                    hashSalt,
                }
            }
        }
    });
}
```
3. Add field details to [apps/api/src/lib/services/serviceFields.ts](apps/api/src/lib/services/serviceFields.ts), so every component will know what to do with the values (decrypt/show it by default/readonly)
```js
export const umami = [{
    name: 'postgresqlUser',
    isEditable: false,
    isLowerCase: false,
    isNumber: false,
    isBoolean: false,
    isEncrypted: false
}]
```
```
4. Add service deletion query to `removeService` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts)
5. Add start process for the new service in [apps/api/src/lib/services/handlers.ts](apps/api/src/lib/services/handlers.ts)
> See startUmamiService() function as example.
6. Add the newly added start process to `startService` in [apps/api/src/routes/api/v1/services/handlers.ts](apps/api/src/routes/api/v1/services/handlers.ts)
7. You need to add a custom logo at [apps/ui/src/lib/components/svg/services](apps/ui/src/lib/components/svg/services) as a Svelte component and export it in [apps/ui/src/lib/components/svg/services/index.ts](apps/ui/src/lib/components/svg/services/index.ts)
SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
8. You need to include the logo at:
- [apps/ui/src/lib/components/svg/services/ServiceIcons.svelte](apps/ui/src/lib/components/svg/services/ServiceIcons.svelte) with `isAbsolute`.
- [apps/ui/src/routes/services/[id]/_ServiceLinks.svelte](apps/ui/src/routes/services/[id]/_ServiceLinks.svelte) with the link to the docs/main site of the service
9. By default the URL and the name frontend forms are included in [apps/ui/src/routes/services/[id]/_Services/_Services.svelte](apps/ui/src/routes/services/[id]/_Services/_Services.svelte).
If you need to show more details on the frontend, such as users/passwords, you need to add Svelte component to [apps/ui/src/routes/services/[id]/_Services](apps/ui/src/routes/services/[id]/_Services) with an underscore.
> For example, see other [here](apps/ui/src/routes/services/[id]/_Services/_Umami.svelte).
Good job! 👏
<!-- # 🌐 Translate the project
The project use [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) to translate the project.
It follows the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) to name languages.
### Installation
You must have gone through all the [intro](#introduction) steps before you can start translating.
It's only a suggestion, but I recommend using:
- Visual Studio Code
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal to see the progress of the translation.
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get the syntax color for the project
### Adding a language
If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the step below:
1. In `src/lib/locales/`, Copy paste `en.json` and rename it with your language (eg: `cz.json`).
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO of your language": "Language",` (eg: `"cz": "Czech",`).
3. Have fun translating! -->

123
CONTRIBUTION.md Normal file
View File

@@ -0,0 +1,123 @@
# Contribution
First, thanks for considering contributing to my project. It really means a lot! :)
You can ask for guidance anytime on our Discord server in the #contribution channel.
## Setup your development environment
### Container based development flow (recommended and the easiest)
All you need is to install [Docker Engine 20.11+](https://docs.docker.com/engine/install/) on your local machine and run `pnpm dev:container`. It will build the base image for Coolify and start the development server inside Docker. All required ports (3000, 3001) will be exposed to your host.
### Github codespaces
If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to start the dev environment. All the required dependencies and packages have been configured for you already.
### Gitpod
1. Use [container based development flow](#container-based-development-flow-easiest)
2. Or set up your workspace manually:
Create a workspace from this repository, run `pnpm install && pnpm db:push && pnpm db:seed` and then `pnpm dev`. All the required dependencies and packages have been configured for you already.
> Some packages, such as `pack`, are not installed this way, so you cannot test all the features. Please use the [container based development flow](#container-based-development-flow-easiest).
### Local Machine
> At the moment, Coolify `doesn't support Windows`. You must use `Linux` or `MacOS` or consider using Gitpod or Github Codespaces.
Install all the prerequisites manually on your host system. If you would rather not install anything, I suggest using the [container based development flow](#container-based-development-flow-easiest).
- Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
- You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
- You need to have [Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/) installed locally.
- You need to have [GIT LFS Support](https://git-lfs.github.com/) installed locally.
Optional:
- To test Heroku buildpacks, you need [pack](https://github.com/buildpacks/pack) binary installed locally.
### Inside a Docker container
`WIP`
## Setup Coolify
- Copy `apps/api/.env.template` to `apps/api/.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- `pnpm install` to install dependencies.
- `pnpm db:push` to create a local SQLite database.
This will apply all migrations at `db/dev.db`.
- `pnpm db:seed` to seed the database.
- `pnpm dev` to start coding.
## Technical skills required
- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: [SvelteKit](https://kit.svelte.dev/) & [Fastify](https://www.fastify.io/)
- **Database ORM**: [Prisma.io](https://www.prisma.io/)
- **Docker Engine API**
## Add a new service
### Which service is eligible to add to Coolify?
The following statements need to be true:
- Self-hostable
- Open-source
- Maintained (I do not want to add software full of bugs)
### Create Prisma / Database schema for the new service.
All data that needs to persist for a service should be saved to the database, either as `cleartext` or `encrypted`.
Every password/API key/passphrase needs to be encrypted. If you are not sure whether it should be encrypted or not, just encrypt it.
Update Prisma schema in [src/apps/api/prisma/schema.prisma](https://github.com/coollabsio/coolify/blob/main/apps/api/prisma/schema.prisma).
- Add new model with the new service name.
- Make a relationship with `Service` model.
- In the `Service` model, the name of the new field should be lowercase.
- If the service needs a database, define a `publicPort` field to be able to make its database public; example field name in the case of PostgreSQL: `postgresqlPublicPort`. It should be an optional field.
Once done, create Prisma schema with `pnpm db:push`.
> You may also need to restart `Typescript Language Server` in your IDE to get the new types.
### Add available versions
Versions are hardcoded into Coolify at the moment and based on Docker image tags.
- Update `supportedServiceTypesAndVersions` function [here](apps/api/src/lib/services/supportedVersions.ts)
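For reference, an entry in `supportedServiceTypesAndVersions` looks roughly like the Umami example below, taken from the previous guide; the exact fields can differ per service.
```js
{
    // Name used to identify the service internally
    name: 'umami',
    // Fancier name to show to the user
    fancyName: 'Umami',
    // Docker base image for the service
    baseImage: 'ghcr.io/mikecao/umami',
    // Optional: dependent images, if any
    images: [],
    // Usable tags
    versions: ['postgresql-latest'],
    // Which tag is the recommended one
    recommendedVersion: 'postgresql-latest',
    // The service's default port; Umami listens on 3000
    ports: {
        main: 3000
    }
}
```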
### Include the new service in queries
In the `includeServices` function [here](apps/api/src/lib/services/common.ts), add the new table name so that it is included in all database queries where it is required.
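A minimal sketch of that change, following the `include` object shown in the previous guide (the other service fields are abbreviated here):
```js
const include: any = {
    destinationDocker: true,
    persistentStorage: true,
    serviceSecret: true,
    // ...other existing services...
    umami: true // the new service's table goes here
};
```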
### Define auto-generated fields
In the `configureServiceType` function [here](apps/api/src/lib/services/common.ts), add the initial auto-generated details such as passwords, users, etc., and the encryption process for secrets (if applicable).
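As an illustration, the Umami branch inside `configureServiceType` from the previous guide looks roughly like this; `cuid`, `encrypt` and `generatePassword` are the helpers used there:
```js
else if (type === 'umami') {
    // auto-generated credentials; secrets are stored encrypted
    const postgresqlUser = cuid();
    const postgresqlPassword = encrypt(generatePassword());
    const postgresqlDatabase = 'umami';
    const hashSalt = encrypt(generatePassword(64));
    await prisma.service.update({
        where: { id },
        data: {
            type,
            umami: {
                create: {
                    postgresqlDatabase,
                    postgresqlPassword,
                    postgresqlUser,
                    hashSalt,
                }
            }
        }
    });
}
```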
### Define input field details
At [here](apps/api/src/lib/services/serviceFields.ts), add details about the input fields shown in the UI, so every component (API/UI) knows what to do with the values (decrypt/show by default/readonly/etc.).
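For example, the field definitions for Umami shown in the previous guide look like this:
```js
export const umami = [{
    name: 'postgresqlUser',
    isEditable: false,
    isLowerCase: false,
    isNumber: false,
    isBoolean: false,
    isEncrypted: false
}]
```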
### Define the start process
- At [here](apps/api/src/lib/services/handlers.ts), define how the service should start. It could be complex and based on `docker-compose` definitions.
> See `startUmamiService()` function as example.
- At [here](apps/api/src/routes/api/v1/services/handlers.ts), add the new start process to the `startService` function.
### Define the deletion process
[Here](apps/api/src/lib/services/common.ts), in `removeService`, add the database deletion process.
### Custom logo
- At [here](apps/ui/src/lib/components/svg/services) add the service's custom logo as a Svelte component and export it [here](apps/ui/src/lib/components/svg/services/index.ts).
> SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
- At [here](apps/ui/src/lib/components/svg/services/ServiceIcons.svelte) include the new logo with `isAbsolute` property.
- At [here](apps/ui/src/routes/services/[id]/_ServiceLinks.svelte) add links to the documentation of the service.
### Custom fields on the UI
By default the URL and name are shown on the UI. Everything else needs to be added [here](apps/ui/src/routes/services/[id]/_Services/_Services.svelte)
> If you need to show more details on the frontend, such as users/passwords, you need to add a Svelte component [here](apps/ui/src/routes/services/[id]/_Services) with an underscore prefix. For an example, see [here](apps/ui/src/routes/services/[id]/_Services/_Umami.svelte).
Good job! 👏

View File

@@ -1,108 +0,0 @@
---
head:
- - meta
- name: description
content: Coolify - Databases
- - meta
- name: keywords
content: databases coollabs coolify
- - meta
- name: twitter:card
content: summary_large_image
- - meta
- name: twitter:site
content: '@andrasbacsai'
- - meta
- name: twitter:title
content: Coolify
- - meta
- name: twitter:description
content: An open-source & self-hostable Heroku / Netlify alternative.
- - meta
- name: twitter:image
content: https://cdn.coollabs.io/assets/coollabs/og-image-databases.png
- - meta
- property: og:type
content: website
- - meta
- property: og:url
content: https://coolify.io
- - meta
- property: og:title
content: Coolify
- - meta
- property: og:description
content: An open-source & self-hostable Heroku / Netlify alternative.
- - meta
- property: og:site_name
content: Coolify
- - meta
- property: og:image
content: https://cdn.coollabs.io/assets/coollabs/og-image-databases.png
---
# Contribution
First, thanks for considering contributing to my project. It really means a lot! :)
You can ask for guidance anytime on our Discord server in the #contribution channel.
## Setup your development environment
### Github codespaces
If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to start the dev environment. All the required dependencies and packages have been configured for you already.
### Gitpod
If you have a [Gitpod](https://gitpod.io) account, you can just create a workspace from this repository, run `pnpm install && pnpm db:push && pnpm db:seed` and then `pnpm dev`. All the required dependencies and packages have been configured for you already.
### Local Machine
> At the moment, Coolify `doesn't support Windows`. You must use `Linux` or `MacOS` or consider using Gitpod or Github Codespaces.
- Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
- You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
- You need to have [Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/) installed locally.
- You need to have [GIT LFS Support](https://git-lfs.github.com/) installed locally.
Optional:
- To test Heroku buildpacks, you need [pack](https://github.com/buildpacks/pack) binary installed locally.
### Inside a Docker container
`WIP`
## Setup Coolify
- Copy `apps/api/.env.template` to `apps/api/.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- `pnpm install` to install dependencies.
- `pnpm db:push` to create a local SQLite database.
This will apply all migrations at `db/dev.db`.
- `pnpm db:seed` to seed the database.
- `pnpm dev` to start coding.
## Technical skills required
- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: [SvelteKit](https://kit.svelte.dev/) & [Fastify](https://www.fastify.io/)
- **Database ORM**: [Prisma.io](https://www.prisma.io/)
- **Docker Engine API**
## Add a new service
### Which service is eligible to add to Coolify?
The following statements need to be true:
- Self-hostable
- Open-source
- Maintained (I do not want to add software full of bugs)
### Create Prisma / Database schema for the new service.
All data that needs to persist for a service should be saved to the database, either as `cleartext` or `encrypted`.
Every password/API key/passphrase needs to be encrypted. If you are not sure whether it should be encrypted or not, just encrypt it.
Update Prisma schema in [src/api/prisma/schema.prisma](https://github.com/coollabsio/coolify/blob/main/apps/api/prisma/schema.prisma).
- Add new model with the new service name.
- Make a relationship with `Service` model.
- In the `Service` model, the name of the new field should be lowercase.
- If the service needs a database, define a `publicPort` field to be able to make its database public; example field name in the case of PostgreSQL: `postgresqlPublicPort`. It should be an optional field.

View File

@@ -1,5 +1,4 @@
ARG PNPM_VERSION=7.11.0
-ARG NPM_VERSION=8.19.1
FROM node:18-slim as build
WORKDIR /app
@@ -17,22 +16,29 @@ WORKDIR /app
ENV NODE_ENV production
ARG TARGETPLATFORM
+# https://download.docker.com/linux/static/stable/
+ARG DOCKER_VERSION=20.10.18
+# https://github.com/docker/compose/releases
+# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
+ARG DOCKER_COMPOSE_VERSION=2.6.1
+# https://github.com/buildpacks/pack/releases
+ARG PACK_VERSION=v0.27.0
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${PNPM_VERSION}
RUN mkdir -p ~/.docker/cli-plugins/
-# https://download.docker.com/linux/static/stable/
-RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
-# https://github.com/docker/compose/releases
-# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
-RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.6.1 -o ~/.docker/cli-plugins/docker-compose
-RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
-RUN (curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.27.0/pack-v0.27.0-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack)
+RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
+RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-$DOCKER_COMPOSE_VERSION -o ~/.docker/cli-plugins/docker-compose
+RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /usr/local/bin/pack
+RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack
COPY --from=build /app/apps/api/build/ .
+COPY --from=build /app/others/fluentbit/ ./fluentbit
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .

31
Dockerfile-dev Normal file
View File

@@ -0,0 +1,31 @@
FROM node:18-slim
ENV NODE_ENV development
ARG TARGETPLATFORM
ARG PNPM_VERSION=7.11.0
ARG NPM_VERSION=8.19.1
# https://download.docker.com/linux/static/stable/
ARG DOCKER_VERSION=20.10.18
# https://github.com/docker/compose/releases
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=v0.27.0
WORKDIR /app
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${PNPM_VERSION}
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-$DOCKER_COMPOSE_VERSION -o ~/.docker/cli-plugins/docker-compose
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /usr/local/bin/pack
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack
EXPOSE 3000
ENV CHECKPOINT_DISABLE=1

View File

@@ -3,41 +3,46 @@
"description": "Coolify's Fastify API", "description": "Coolify's Fastify API",
"license": "Apache-2.0", "license": "Apache-2.0",
"scripts": { "scripts": {
"db:generate":"prisma generate",
"db:push": "prisma db push && prisma generate", "db:push": "prisma db push && prisma generate",
"db:seed": "prisma db seed", "db:seed": "prisma db seed",
"db:studio": "prisma studio", "db:studio": "prisma studio",
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name", "db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
"dev": "nodemon", "dev": "nodemon",
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs", "build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --minify=true --platform=node --outdir=build --format=cjs",
"format": "prettier --write 'src/**/*.{js,ts,json,md}'", "format": "prettier --write 'src/**/*.{js,ts,json,md}'",
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .", "lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
"start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js" "start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js"
}, },
"dependencies": { "dependencies": {
"@breejs/ts-worker": "2.0.0", "@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.3.1", "@fastify/autoload": "5.4.0",
"@fastify/cookie": "8.1.0", "@fastify/cookie": "8.3.0",
"@fastify/cors": "8.1.0", "@fastify/cors": "8.1.0",
"@fastify/env": "4.1.0", "@fastify/env": "4.1.0",
"@fastify/jwt": "6.3.2", "@fastify/jwt": "6.3.2",
"@fastify/multipart": "7.2.0",
"@fastify/static": "6.5.0", "@fastify/static": "6.5.0",
"@iarna/toml": "2.2.5", "@iarna/toml": "2.2.5",
"@ladjs/graceful": "3.0.2", "@ladjs/graceful": "3.0.2",
"@prisma/client": "4.3.1", "@prisma/client": "4.4.0",
"prisma": "4.4.0",
"axios": "0.27.2", "axios": "0.27.2",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
"bree": "9.1.2", "bree": "9.1.2",
"cabin": "9.1.2", "cabin": "9.1.2",
"compare-versions": "5.0.1", "compare-versions": "5.0.1",
"csv-parse": "5.3.1",
"csvtojson": "2.0.10",
"cuid": "2.1.8", "cuid": "2.1.8",
"dayjs": "1.11.5", "dayjs": "1.11.5",
"dockerode": "3.3.4", "dockerode": "3.3.4",
"dotenv-extended": "2.9.0", "dotenv-extended": "2.9.0",
"execa": "6.1.0", "execa": "6.1.0",
"fastify": "4.5.3", "fastify": "4.8.1",
"fastify-plugin": "4.2.1", "fastify-plugin": "4.2.1",
"generate-password": "1.7.0", "generate-password": "1.7.0",
"got": "12.4.1", "got": "12.5.2",
"is-ip": "5.0.0", "is-ip": "5.0.0",
"is-port-reachable": "4.0.0", "is-port-reachable": "4.0.0",
"js-yaml": "4.1.0", "js-yaml": "4.1.0",
@@ -47,25 +52,26 @@
"p-all": "4.0.0", "p-all": "4.0.0",
"p-throttle": "5.0.0", "p-throttle": "5.0.0",
"public-ip": "6.0.1", "public-ip": "6.0.1",
"pump": "^3.0.0",
"ssh-config": "4.1.6", "ssh-config": "4.1.6",
"strip-ansi": "7.0.1", "strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1" "unique-names-generator": "4.7.1"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "18.7.15", "@types/node": "18.8.5",
"@types/node-os-utils": "1.3.0", "@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.36.2", "@typescript-eslint/eslint-plugin": "5.38.1",
"@typescript-eslint/parser": "5.36.2", "@typescript-eslint/parser": "5.38.1",
"esbuild": "0.15.7", "esbuild": "0.15.10",
"eslint": "8.23.0", "eslint": "8.25.0",
"eslint-config-prettier": "8.5.0", "eslint-config-prettier": "8.5.0",
"eslint-plugin-prettier": "4.2.1", "eslint-plugin-prettier": "4.2.1",
"nodemon": "2.0.19", "nodemon": "2.0.20",
"prettier": "2.7.1", "prettier": "2.7.1",
"prisma": "4.3.1",
"rimraf": "3.0.2", "rimraf": "3.0.2",
"tsconfig-paths": "4.1.0", "tsconfig-paths": "4.1.0",
"typescript": "4.8.2" "typescript": "4.8.4"
}, },
"prisma": { "prisma": {
"seed": "node prisma/seed.js" "seed": "node prisma/seed.js"

View File

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "DatabaseSecret" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"value" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"databaseId" TEXT NOT NULL,
CONSTRAINT "DatabaseSecret_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "DatabaseSecret_name_databaseId_key" ON "DatabaseSecret"("name", "databaseId");

View File

@@ -0,0 +1,18 @@
-- AlterTable
ALTER TABLE "Build" ADD COLUMN "previewApplicationId" TEXT;
-- CreateTable
CREATE TABLE "PreviewApplication" (
"id" TEXT NOT NULL PRIMARY KEY,
"pullmergeRequestId" TEXT NOT NULL,
"sourceBranch" TEXT NOT NULL,
"isRandomDomain" BOOLEAN NOT NULL DEFAULT false,
"customDomain" TEXT,
"applicationId" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "PreviewApplication_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "PreviewApplication_applicationId_key" ON "PreviewApplication"("applicationId");

View File

@@ -0,0 +1,10 @@
-- CreateTable
CREATE TABLE "Certificate" (
"id" TEXT NOT NULL PRIMARY KEY,
"key" TEXT NOT NULL,
"cert" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"teamId" TEXT,
CONSTRAINT "Certificate_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

View File

@@ -0,0 +1,23 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"debug" BOOLEAN NOT NULL DEFAULT false,
"previews" BOOLEAN NOT NULL DEFAULT false,
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
"isBot" BOOLEAN NOT NULL DEFAULT false,
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,26 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"forPublic" BOOLEAN NOT NULL DEFAULT false,
"type" TEXT,
"apiUrl" TEXT,
"htmlUrl" TEXT,
"customPort" INTEGER NOT NULL DEFAULT 22,
"organization" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- DropIndex
DROP INDEX "PreviewApplication_applicationId_key";

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Build" ADD COLUMN "sourceRepository" TEXT;

View File

@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerComposeFile" TEXT;
ALTER TABLE "Application" ADD COLUMN "dockerComposeFileLocation" TEXT;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerComposeConfiguration" TEXT;

View File

@@ -8,6 +8,16 @@ datasource db {
	url = env("COOLIFY_DATABASE_URL")
}

+model Certificate {
+	id String @id @default(cuid())
+	key String
+	cert String
+	team Team? @relation(fields: [teamId], references: [id])
+	createdAt DateTime @default(now())
+	updatedAt DateTime @updatedAt
+	teamId String?
+}
+
model Setting {
	id String @id @default(cuid())
	fqdn String? @unique
@@ -70,6 +80,7 @@ model Team {
	gitLabApps GitlabApp[]
	service Service[]
	users User[]
+	certificate Certificate[]
}

model TeamInvitation {
@@ -83,42 +94,58 @@ model TeamInvitation {
}

model Application {
	id String @id @default(cuid())
	name String
	fqdn String?
	repository String?
	configHash String?
	branch String?
	buildPack String?
	projectId Int?
	port Int?
	exposePort Int?
	installCommand String?
	buildCommand String?
	startCommand String?
	baseDirectory String?
	publishDirectory String?
	deploymentType String?
	phpModules String?
	pythonWSGI String?
	pythonModule String?
	pythonVariable String?
	dockerFileLocation String?
	denoMainFile String?
	denoOptions String?
+	dockerComposeFile String?
+	dockerComposeFileLocation String?
+	dockerComposeConfiguration String?
	createdAt DateTime @default(now())
	updatedAt DateTime @updatedAt
	destinationDockerId String?
	gitSourceId String?
	baseImage String?
	baseBuildImage String?
	gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
	destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
	persistentStorage ApplicationPersistentStorage[]
	settings ApplicationSettings?
	secrets Secret[]
	teams Team[]
	connectedDatabase ApplicationConnectedDatabase?
+	previewApplication PreviewApplication[]
+}
+
+model PreviewApplication {
+	id String @id @default(cuid())
+	pullmergeRequestId String
+	sourceBranch String
+	isRandomDomain Boolean @default(false)
+	customDomain String?
+	applicationId String
+	application Application @relation(fields: [applicationId], references: [id])
+	createdAt DateTime @default(now())
+	updatedAt DateTime @updatedAt
}

model ApplicationConnectedDatabase {
@@ -148,6 +175,7 @@ model ApplicationSettings {
	isBot Boolean @default(false)
	isPublicRepository Boolean @default(false)
	isDBBranching Boolean @default(false)
+	isCustomSSL Boolean @default(false)
	createdAt DateTime @default(now())
	updatedAt DateTime @updatedAt
	application Application @relation(fields: [applicationId], references: [id])
@@ -210,21 +238,23 @@ model BuildLog {
}

model Build {
	id String @id @default(cuid())
	type String
	applicationId String?
	destinationDockerId String?
	gitSourceId String?
	githubAppId String?
	gitlabAppId String?
	commit String?
	pullmergeRequestId String?
+	previewApplicationId String?
	forceRebuild Boolean @default(false)
	sourceBranch String?
+	sourceRepository String?
	branch String?
	status String? @default("queued")
	createdAt DateTime @default(now())
	updatedAt DateTime @updatedAt
}

model DestinationDocker {
@@ -273,6 +303,7 @@ model GitSource {
	updatedAt DateTime @updatedAt
	githubAppId String? @unique
	gitlabAppId String? @unique
+	isSystemWide Boolean @default(false)
	gitlabApp GitlabApp? @relation(fields: [gitlabAppId], references: [id])
	githubApp GithubApp? @relation(fields: [githubAppId], references: [id])
	application Application[]
@@ -328,6 +359,19 @@ model Database {
	settings DatabaseSettings?
	teams Team[]
	applicationConnectedDatabase ApplicationConnectedDatabase[]
+	databaseSecret DatabaseSecret[]
+}
+
+model DatabaseSecret {
+	id String @id @default(cuid())
+	name String
+	value String
+	createdAt DateTime @default(now())
+	updatedAt DateTime @updatedAt
+	databaseId String
+	database Database @relation(fields: [databaseId], references: [id])
+
+	@@unique([name, databaseId])
}

model DatabaseSettings {

View File

@@ -94,6 +94,16 @@ async function main() {
		}
	});
	}
+	// Set new preview secrets
+	const secrets = await prisma.secret.findMany({ where: { isPRMRSecret: false } })
+	if (secrets.length > 0) {
+		for (const secret of secrets) {
+			const previewSecrets = await prisma.secret.findMany({ where: { applicationId: secret.applicationId, name: secret.name, isPRMRSecret: true } })
+			if (previewSecrets.length === 0) {
+				await prisma.secret.create({ data: { ...secret, id: undefined, isPRMRSecret: true } })
+			}
+		}
+	}
}
main()
	.catch((e) => {

View File

@@ -3,12 +3,17 @@ import cors from '@fastify/cors';
import serve from '@fastify/static';
import env from '@fastify/env';
import cookie from '@fastify/cookie';
+import multipart from '@fastify/multipart';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
-import { asyncExecShell, createRemoteEngineConfiguration, getDomain, isDev, listSettings, prisma, version } from './lib/common';
+import { asyncExecShell, cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, executeDockerCmd, executeSSHCmd, generateDatabaseConfiguration, getDomain, isDev, listSettings, prisma, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common';
import { scheduler } from './lib/scheduler';
import { compareVersions } from 'compare-versions';
import Graceful from '@ladjs/graceful'
+import axios from 'axios';
+import fs from 'fs/promises';
+import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers';
+import { checkContainer } from './lib/docker';

declare module 'fastify' {
	interface FastifyInstance {
		config: {
@@ -26,11 +31,13 @@ declare module 'fastify' {
const port = isDev ? 3001 : 3000;
const host = '0.0.0.0';
-prisma.setting.findFirst().then(async (settings) => {
+(async () => {
+	const settings = await prisma.setting.findFirst()
	const fastify = Fastify({
		logger: settings?.isAPIDebuggingEnabled || false,
		trustProxy: true
	});
	const schema = {
		type: 'object',
		required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
@@ -68,7 +75,6 @@ prisma.setting.findFirst().then(async (settings) => {
		}
	};
	const options = {
		schema,
		dotenv: true
@@ -88,13 +94,13 @@ prisma.setting.findFirst().then(async (settings) => {
			return reply.status(200).sendFile('index.html');
		});
	}
+	fastify.register(multipart, { limits: { fileSize: 100000 } });
	fastify.register(autoLoad, {
		dir: join(__dirname, 'plugins')
	});
	fastify.register(autoLoad, {
		dir: join(__dirname, 'routes')
	});
	fastify.register(cookie)
	fastify.register(cors);
	fastify.addHook('onRequest', async (request, reply) => {
@@ -115,11 +121,8 @@ prisma.setting.findFirst().then(async (settings) => {
			// console.log('not allowed', request.headers.host)
		}
	})
-	fastify.listen({ port, host }, async (err: any, address: any) => {
-		if (err) {
-			console.error(err);
-			process.exit(1);
-		}
+	try {
+		await fastify.listen({ port, host })
		console.log(`Coolify's API is listening on ${host}:${port}`);
		await initServer();
@@ -130,49 +133,52 @@ prisma.setting.findFirst().then(async (settings) => {
			if (!scheduler.workers.has('deployApplication')) {
				scheduler.run('deployApplication');
			}
-			if (!scheduler.workers.has('infrastructure')) {
-				scheduler.run('infrastructure');
-			}
		}, 2000)
		// autoUpdater
		setInterval(async () => {
-			scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:autoUpdater")
-		}, isDev ? 5000 : 60000 * 15)
+			await autoUpdater()
+		}, 60000 * 15)
		// cleanupStorage
		setInterval(async () => {
-			scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupStorage")
-		}, isDev ? 6000 : 60000 * 10)
+			await cleanupStorage()
+		}, 60000 * 10)
-		// checkProxies
+		// checkProxies and checkFluentBit
		setInterval(async () => {
-			scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkProxies")
+			await checkProxies();
+			await checkFluentBit();
		}, 10000)
-		// cleanupPrismaEngines
-		// setInterval(async () => {
-		// 	scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupPrismaEngines")
-		// }, 60000)
+		setInterval(async () => {
+			await copySSLCertificates();
+		}, 2000)
		await Promise.all([
			getArch(),
			getIPAddress(),
			configureRemoteDockers(),
		])
-	});
-})
+	} catch (error) {
+		console.error(error);
+		process.exit(1);
+	}
+})();
async function getIPAddress() {
	const { publicIpv4, publicIpv6 } = await import('public-ip')
	try {
		const settings = await listSettings();
		if (!settings.ipv4) {
+			console.log(`Getting public IPv4 address...`);
			const ipv4 = await publicIpv4({ timeout: 2000 })
			await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
		}
		if (!settings.ipv6) {
+			console.log(`Getting public IPv6 address...`);
			const ipv6 = await publicIpv6({ timeout: 2000 })
			await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
		}
@@ -181,6 +187,7 @@ async function getIPAddress() {
}
async function initServer() {
	try {
+		console.log(`Initializing server...`);
		await asyncExecShell(`docker network create --attachable coolify`);
	} catch (error) { }
	try {
@@ -194,6 +201,7 @@ async function getArch() {
	try {
		const settings = await prisma.setting.findFirst({})
		if (settings && !settings.arch) {
+			console.log(`Getting architecture...`);
			await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } })
		}
	} catch (error) { }
@@ -205,9 +213,247 @@ async function configureRemoteDockers() {
		where: { remoteVerified: true, remoteEngine: true }
	});
	if (remoteDocker.length > 0) {
+		console.log(`Verifying Remote Docker Engines...`);
		for (const docker of remoteDocker) {
-			await createRemoteEngineConfiguration(docker.id)
+			console.log('Verifying:', docker.remoteIpAddress)
+			await verifyRemoteDockerEngineFn(docker.id);
}
}
} catch (error) {
console.log(error)
}
}
async function autoUpdater() {
try {
const currentVersion = version;
const { data: versions } = await axios
.get(
`https://get.coollabs.io/versions.json`
, {
params: {
appId: process.env['COOLIFY_APP_ID'] || undefined,
version: currentVersion
}
})
const latestVersion = versions['coolify'].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
const activeCount = 0
if (activeCount === 0) {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
await asyncExecShell(`env | grep '^COOLIFY' > .env`);
await asyncExecShell(
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
);
}
} else {
console.log('Updating (not really in dev mode).');
}
} }
} }
} catch (error) { } } catch (error) { }
} }
async function checkFluentBit() {
try {
if (!isDev) {
const engine = '/var/run/docker.sock';
const { id } = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify' }
});
const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit', remove: true });
if (!found) {
await asyncExecShell(`env | grep '^COOLIFY' > .env`);
await asyncExecShell(`docker compose up -d fluent-bit`);
}
}
} catch (error) {
console.log(error)
}
}
async function checkProxies() {
try {
const { default: isReachable } = await import('is-port-reachable');
let portReachable;
const { arch, ipv4, ipv6 } = await listSettings();
// Coolify Proxy local
const engine = '/var/run/docker.sock';
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
});
if (localDocker) {
portReachable = await isReachable(80, { host: ipv4 || ipv6 })
if (!portReachable) {
await startTraefikProxy(localDocker.id);
}
}
// Coolify Proxy remote
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteEngine: true, remoteVerified: true }
});
if (remoteDocker.length > 0) {
for (const docker of remoteDocker) {
if (docker.isCoolifyProxyUsed) {
portReachable = await isReachable(80, { host: docker.remoteIpAddress })
if (!portReachable) {
await startTraefikProxy(docker.id);
}
}
try {
await createRemoteEngineConfiguration(docker.id)
} catch (error) { }
}
}
// TCP Proxies
const databasesWithPublicPort = await prisma.database.findMany({
where: { publicPort: { not: null } },
include: { settings: true, destinationDocker: true }
});
for (const database of databasesWithPublicPort) {
const { destinationDockerId, destinationDocker, publicPort, id } = database;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
const { privatePort } = generateDatabaseConfiguration(database, arch);
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
}
}
const wordpressWithFtp = await prisma.wordpress.findMany({
where: { ftpPublicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const ftp of wordpressWithFtp) {
const { service, ftpPublicPort } = ftp;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
}
}
// HTTP Proxies
const minioInstances = await prisma.minio.findMany({
where: { publicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const minio of minioInstances) {
const { service, publicPort } = minio;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
}
}
} catch (error) {
}
}
async function copySSLCertificates() {
try {
const pAll = await import('p-all');
const actions = []
const certificates = await prisma.certificate.findMany({ include: { team: true } })
const teamIds = certificates.map(c => c.teamId)
const destinations = await prisma.destinationDocker.findMany({ where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } } })
for (const certificate of certificates) {
const { id, key, cert } = certificate
const decryptedKey = decrypt(key)
await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey)
await fs.writeFile(`/tmp/${id}-cert.pem`, cert)
for (const destination of destinations) {
if (destination.remoteEngine) {
if (destination.remoteVerified) {
const { id: dockerId, remoteIpAddress } = destination
actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress))
}
} else {
actions.push(async () => copyLocalCertificates(id))
}
}
}
await pAll.default(actions, { concurrency: 1 })
} catch (error) {
console.log(error)
} finally {
await asyncExecShell(`find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete`)
}
}
async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
try {
await asyncExecShell(`scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`)
await executeSSHCmd({ dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
} catch (error) {
console.log({ error })
}
}
async function copyLocalCertificates(id: string) {
try {
await asyncExecShell(`docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`)
await asyncExecShell(`docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`)
await asyncExecShell(`docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`)
} catch (error) {
console.log({ error })
}
}
async function cleanupStorage() {
const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set()
for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return
if (destination.engine) enginesDone.add(destination.engine)
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)
let lowDiskSpace = false;
try {
let stdout = null
if (!isDev) {
const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` })
stdout = output.stdout;
} else {
const output = await asyncExecShell(
`df -kPT /`
);
stdout = output.stdout;
}
let lines = stdout.trim().split('\n');
let header = lines[0];
let regex =
/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
const boundaries = [];
let match;
while ((match = regex.exec(header))) {
boundaries.push(match[0].length);
}
boundaries[boundaries.length - 1] = -1;
const data = lines.slice(1).map((line) => {
const cl = boundaries.map((boundary) => {
const column = boundary > 0 ? line.slice(0, boundary) : line;
line = line.slice(boundary);
return column.trim();
});
return {
capacity: Number.parseInt(cl[5], 10) / 100
};
});
if (data.length > 0) {
const { capacity } = data[0];
if (capacity > 0.8) {
lowDiskSpace = true;
}
}
} catch (error) { }
await cleanupDockerStorage(destination.id, lowDiskSpace, false)
}
}

View File

@@ -38,15 +38,22 @@ import * as buildpacks from '../lib/buildPacks';
	for (const queueBuild of queuedBuilds) {
		actions.push(async () => {
			let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
-			let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, forceRebuild } = queueBuild
+			let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild, sourceRepository = null } = queueBuild
			application = decryptApplication(application)
+			const originalApplicationId = application.id
+			if (pullmergeRequestId) {
+				const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
+				if (previewApplications.length > 0) {
+					previewApplicationId = previewApplications[0].id
+				}
+			}
+			const usableApplicationId = previewApplicationId || originalApplicationId
			try {
				if (queueBuild.status === 'running') {
					await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
				}
				const {
					id: applicationId,
-					repository,
					name,
					destinationDocker,
					destinationDockerId,
@@ -69,6 +76,7 @@ import * as buildpacks from '../lib/buildPacks';
				} = application
				let {
					branch,
+					repository,
					buildPack,
					port,
					installCommand,
@@ -77,6 +85,7 @@ import * as buildpacks from '../lib/buildPacks';
					baseDirectory,
					publishDirectory,
					dockerFileLocation,
+					dockerComposeConfiguration,
					denoMainFile
				} = application
				const currentHash = crypto
@@ -104,17 +113,6 @@ import * as buildpacks from '../lib/buildPacks';
					)
					.digest('hex');
				const { debug } = settings;
-				if (concurrency === 1) {
-					await prisma.build.updateMany({
-						where: {
-							status: { in: ['queued', 'running'] },
-							id: { not: buildId },
-							applicationId,
-							createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
-						},
-						data: { status: 'failed' }
-					});
-				}
				let imageId = applicationId;
				let domain = getDomain(fqdn);
				const volumes =
@@ -127,8 +125,13 @@ import * as buildpacks from '../lib/buildPacks';
					branch = sourceBranch;
					domain = `${pullmergeRequestId}.${domain}`;
					imageId = `${applicationId}-${pullmergeRequestId}`;
+					repository = sourceRepository || repository;
				}
+				try {
+					dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration)
+				} catch (error) { }
				let deployNeeded = true;
				let destinationType;
@@ -146,7 +149,7 @@ import * as buildpacks from '../lib/buildPacks';
					startCommand = configuration.startCommand;
					buildCommand = configuration.buildCommand;
					publishDirectory = configuration.publishDirectory;
-					baseDirectory = configuration.baseDirectory;
+					baseDirectory = configuration.baseDirectory || '';
					dockerFileLocation = configuration.dockerFileLocation;
					denoMainFile = configuration.denoMainFile;
					const commit = await importers[gitSource.type]({
@@ -203,18 +206,37 @@ import * as buildpacks from '../lib/buildPacks';
					//
				}
				await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
+				const labels = makeLabelForStandaloneApplication({
+					applicationId,
+					fqdn,
+					name,
+					type,
+					pullmergeRequestId,
+					buildPack,
+					repository,
+					branch,
+					projectId,
+					port: exposePort ? `${exposePort}:${port}` : port,
+					commit,
+					installCommand,
+					buildCommand,
+					startCommand,
+					baseDirectory,
+					publishDirectory
+				});
				if (forceRebuild) deployNeeded = true
				if (!imageFound || deployNeeded) {
-					// if (true) {
					if (buildpacks[buildPack])
						await buildpacks[buildPack]({
							dockerId: destinationDocker.id,
+							network: destinationDocker.network,
							buildId,
							applicationId,
							domain,
							name,
							type,
+							volumes,
+							labels,
							pullmergeRequestId,
							buildPack,
							repository,
@@ -236,11 +258,12 @@ import * as buildpacks from '../lib/buildPacks';
							pythonModule,
							pythonVariable,
							dockerFileLocation,
+							dockerComposeConfiguration,
							denoMainFile,
							denoOptions,
							baseImage,
							baseBuildImage,
-							deploymentType
+							deploymentType,
						});
					else {
						await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
@@ -249,112 +272,152 @@ import * as buildpacks from '../lib/buildPacks';
} else { } else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId }); await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
} }
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` }) if (buildPack === 'compose') {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` }) try {
} catch (error) { await executeDockerCmd({
// dockerId: destinationDockerId,
} command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
const envs = [ })
`PORT=${port}` await executeDockerCmd({
]; dockerId: destinationDockerId,
if (secrets.length > 0) { command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
secrets.forEach((secret) => { })
if (pullmergeRequestId) { } catch (error) {
if (secret.isPRMRSecret) { //
envs.push(`${secret.name}=${secret.value}`); }
} try {
} else { await executeDockerCmd({ debug, buildId, applicationId, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
if (!secret.isPRMRSecret) { await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
envs.push(`${secret.name}=${secret.value}`); await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
} await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
} }
}); throw new Error(error);
} }
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({ } else {
applicationId, try {
fqdn, await executeDockerCmd({
name, dockerId: destinationDockerId,
type, command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
pullmergeRequestId, })
buildPack, await executeDockerCmd({
repository, dockerId: destinationDockerId,
branch, command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
projectId, })
port: exposePort ? `${exposePort}:${port}` : port, } catch (error) {
commit, //
installCommand, }
buildCommand, const envs = [
startCommand, `PORT=${port}`
baseDirectory, ];
publishDirectory if (secrets.length > 0) {
}); secrets.forEach((secret) => {
let envFound = false; if (pullmergeRequestId) {
try { const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
envFound = !!(await fs.stat(`${workdir}/.env`)); if (isSecretFound.length > 0) {
} catch (error) { envs.push(`${secret.name}=${isSecretFound[0].value}`);
// } else {
} envs.push(`${secret.name}=${secret.value}`);
try { }
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId }); } else {
const composeVolumes = volumes.map((volume) => { if (!secret.isPRMRSecret) {
return { envs.push(`${secret.name}=${secret.value}`);
[`${volume.split(':')[0]}`]: { }
name: volume.split(':')[0]
} }
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
}; };
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
}); });
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
throw new Error(error);
} }
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
} }
} }
catch (error) { catch (error) {
await prisma.build.updateMany({ const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
where: { id: buildId, status: { in: ['queued', 'running'] } }, if (foundBuild) {
data: { status: 'failed' } await prisma.build.update({
}); where: { id: buildId },
await saveBuildLog({ line: error, buildId, applicationId: application.id }); data: {
status: 'failed'
}
});
}
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
} }
}); });
} }

View File

@@ -1,226 +0,0 @@
import { parentPort } from 'node:worker_threads';
import axios from 'axios';
import { compareVersions } from 'compare-versions';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, listSettings, version, createRemoteEngineConfiguration } from '../lib/common';
async function autoUpdater() {
try {
const currentVersion = version;
const { data: versions } = await axios
.get(
`https://get.coollabs.io/versions.json`
, {
params: {
appId: process.env['COOLIFY_APP_ID'] || undefined,
version: currentVersion
}
})
const latestVersion = versions['coolify'].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
const activeCount = 0
if (activeCount === 0) {
if (!isDev) {
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
await asyncExecShell(`env | grep COOLIFY > .env`);
await asyncExecShell(
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=true' .env`
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
);
} else {
console.log('Updating (not really in dev mode).');
}
}
}
} catch (error) { }
}
async function checkProxies() {
try {
const { default: isReachable } = await import('is-port-reachable');
let portReachable;
const { arch, ipv4, ipv6 } = await listSettings();
// Coolify Proxy local
const engine = '/var/run/docker.sock';
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
});
if (localDocker) {
portReachable = await isReachable(80, { host: ipv4 || ipv6 })
if (!portReachable) {
await startTraefikProxy(localDocker.id);
}
}
// Coolify Proxy remote
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteEngine: true, remoteVerified: true }
});
if (remoteDocker.length > 0) {
for (const docker of remoteDocker) {
if (docker.isCoolifyProxyUsed) {
portReachable = await isReachable(80, { host: docker.remoteIpAddress })
if (!portReachable) {
await startTraefikProxy(docker.id);
}
}
try {
await createRemoteEngineConfiguration(docker.id)
} catch (error) { }
}
}
// TCP Proxies
const databasesWithPublicPort = await prisma.database.findMany({
where: { publicPort: { not: null } },
include: { settings: true, destinationDocker: true }
});
for (const database of databasesWithPublicPort) {
const { destinationDockerId, destinationDocker, publicPort, id } = database;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
const { privatePort } = generateDatabaseConfiguration(database, arch);
portReachable = await isReachable(publicPort, { host: destinationDocker.remoteIpAddress || ipv4 || ipv6 })
if (!portReachable) {
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
}
}
}
const wordpressWithFtp = await prisma.wordpress.findMany({
where: { ftpPublicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const ftp of wordpressWithFtp) {
const { service, ftpPublicPort } = ftp;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
portReachable = await isReachable(ftpPublicPort, { host: destinationDocker.remoteIpAddress || ipv4 || ipv6 })
if (!portReachable) {
await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
}
}
}
// HTTP Proxies
const minioInstances = await prisma.minio.findMany({
where: { publicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const minio of minioInstances) {
const { service, publicPort } = minio;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
portReachable = await isReachable(publicPort, { host: destinationDocker.remoteIpAddress || ipv4 || ipv6 })
if (!portReachable) {
await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
}
}
}
} catch (error) {
}
}
async function cleanupPrismaEngines() {
if (!isDev) {
try {
const { stdout } = await asyncExecShell(`ps -ef | grep /app/prisma-engines/query-engine | grep -v grep | wc -l | xargs`)
if (stdout.trim() != null && stdout.trim() != '' && Number(stdout.trim()) > 1) {
await asyncExecShell(`killall -q -e /app/prisma-engines/query-engine -o 1m`)
}
} catch (error) { }
}
}
async function cleanupStorage() {
const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set()
for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return
if (destination.engine) enginesDone.add(destination.engine)
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)
let lowDiskSpace = false;
try {
let stdout = null
if (!isDev) {
const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` })
stdout = output.stdout;
} else {
const output = await asyncExecShell(
`df -kPT /`
);
stdout = output.stdout;
}
let lines = stdout.trim().split('\n');
let header = lines[0];
let regex =
/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
const boundaries = [];
let match;
while ((match = regex.exec(header))) {
boundaries.push(match[0].length);
}
boundaries[boundaries.length - 1] = -1;
const data = lines.slice(1).map((line) => {
const cl = boundaries.map((boundary) => {
const column = boundary > 0 ? line.slice(0, boundary) : line;
line = line.slice(boundary);
return column.trim();
});
return {
capacity: Number.parseInt(cl[5], 10) / 100
};
});
if (data.length > 0) {
const { capacity } = data[0];
if (capacity > 0.8) {
lowDiskSpace = true;
}
}
} catch (error) { }
await cleanupDockerStorage(destination.id, lowDiskSpace, false)
}
}
(async () => {
let status = {
cleanupStorage: false,
autoUpdater: false
}
if (parentPort) {
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
process.exit(1);
}
if (message === 'action:cleanupStorage') {
if (!status.autoUpdater) {
status.cleanupStorage = true
await cleanupStorage();
status.cleanupStorage = false
}
return;
}
if (message === 'action:cleanupPrismaEngines') {
await cleanupPrismaEngines();
return;
}
if (message === 'action:checkProxies') {
await checkProxies();
return;
}
if (message === 'action:autoUpdater') {
if (!status.cleanupStorage) {
status.autoUpdater = true
await autoUpdater();
status.autoUpdater = false
}
return;
}
}
});
} else process.exit(0);
})();

View File

@@ -1,4 +1,4 @@
-import { base64Encode, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
+import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { promises as fs } from 'fs';
import { day } from "../dayjs";
@@ -342,13 +342,13 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
} }
if (buildPack === 'laravel') { if (buildPack === 'laravel') {
payload.baseImage = 'webdevops/php-apache:8.2-alpine'; payload.baseImage = 'webdevops/php-apache:8.2-alpine';
payload.baseImages = phpVersions;
payload.baseBuildImage = 'node:18'; payload.baseBuildImage = 'node:18';
payload.baseBuildImages = nodeVersions; payload.baseBuildImages = nodeVersions;
} }
if (buildPack === 'heroku') { if (buildPack === 'heroku') {
payload.baseImage = 'heroku/buildpacks:20'; payload.baseImage = 'heroku/buildpacks:20';
payload.baseImages = herokuVersions; payload.baseImages = herokuVersions;
} }
return payload; return payload;
} }
@@ -384,7 +384,7 @@ export const setDefaultConfiguration = async (data: any) => {
	if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
	if (baseDirectory) {
		if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
-		if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
+		if (baseDirectory.endsWith('/') && baseDirectory !== '/') baseDirectory = baseDirectory.slice(0, -1);
	}
	if (dockerFileLocation) {
		if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
@@ -461,17 +461,33 @@ export const saveBuildLog = async ({
	buildId: string;
	applicationId: string;
}): Promise<any> => {
+	const { default: got } = await import('got')
	if (line && typeof line === 'string' && line.includes('ghs_')) {
		const regex = /ghs_.*@/g;
		line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
	}
	const addTimestamp = `[${generateTimestamp()}] ${line}`;
-	if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
-	return await prisma.buildLog.create({
-		data: {
-			line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
-		}
-	});
+	const fluentBitUrl = isDev ? process.env.COOLIFY_CONTAINER_DEV === 'true' ? 'http://coolify-fluentbit:24224' : 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
+	if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
+		console.debug(`[${applicationId}] ${addTimestamp}`);
+	}
+	try {
+		return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
+			json: {
+				line: encrypt(line)
+			}
+		})
+	} catch (error) {
+		if (isDev) return
+		return await prisma.buildLog.create({
+			data: {
+				line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
+			}
+		});
+	}
};
export async function copyBaseConfigurationFiles( export async function copyBaseConfigurationFiles(
@@ -556,7 +572,6 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
	);
}

export async function buildImage({
	applicationId,
	tag,
@@ -565,23 +580,26 @@ export async function buildImage({
	dockerId,
	isCache = false,
	debug = false,
-	dockerFileLocation = '/Dockerfile'
+	dockerFileLocation = '/Dockerfile',
+	commit
}) {
	if (isCache) {
		await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
	} else {
		await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
	}
-	if (!debug && isCache) {
+	if (!debug) {
		await saveBuildLog({
-			line: `Debug turned off. To see more details, allow it in the configuration.`,
+			line: `Debug turned off. To see more details, allow it in the features tab.`,
			buildId,
			applicationId
		});
	}
	const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
	const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
-	await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} ${workdir}` })
+	await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })
	const { status } = await prisma.build.findUnique({ where: { id: buildId } })
	if (status === 'canceled') {
		throw new Error('Deployment canceled.')
@@ -593,30 +611,6 @@ export async function buildImage({
} }
} }
export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
await new Promise((resolve, reject) => {
docker.engine.modem.followProgress(stream, onFinished, onProgress);
function onFinished(err, res) {
if (err) reject(err);
resolve(res);
}
async function onProgress(event) {
if (event.error) {
reject(event.error);
} else if (event.stream) {
if (event.stream !== '\n') {
if (debug)
await saveBuildLog({
line: `${event.stream.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
});
}
 export function makeLabelForStandaloneApplication({
 applicationId,
 fqdn,
@@ -643,6 +637,7 @@ export function makeLabelForStandaloneApplication({
 return [
 'coolify.managed=true',
 `coolify.version=${version}`,
+`coolify.applicationId=${applicationId}`,
 `coolify.type=standalone-application`,
 `coolify.configuration=${base64Encode(
 JSON.stringify({
@@ -677,8 +672,6 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
 secrets,
 pullmergeRequestId
 } = data;
 const isPnpm = checkPnpm(installCommand, buildCommand);
 const Dockerfile: Array<string> = [];
 Dockerfile.push(`FROM ${imageForBuild}`);
@@ -688,7 +681,10 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {
@@ -706,6 +702,7 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
 if (installCommand) {
 Dockerfile.push(`RUN ${installCommand}`);
 }
+// Dockerfile.push(`ARG CACHEBUST=1`);
 Dockerfile.push(`RUN ${buildCommand}`);
 await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
 await buildImage({ ...data, isCache: true });
@@ -722,7 +719,10 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {
@@ -762,4 +762,4 @@ export async function buildCacheImageWithCargo(data, imageForBuild) {
 Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
 await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
 await buildImage({ ...data, isCache: true });
 }
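
As a rough sketch of the new build-log path above (helper names here are illustrative stand-ins, not Coolify's actual helpers): a line is shipped to Fluent Bit over HTTP, tagged via the URL path, and falls back to a local store when Fluent Bit is unreachable.

import got from 'got';

// Hypothetical stand-ins for the real encrypt() and prisma fallback used in the diff.
const encrypt = (line: string) => Buffer.from(line).toString('base64');
const storeLocally = async (line: string) => console.log('[fallback]', line);

export async function shipBuildLog(line: string, applicationId: string, buildId: string) {
	// Mirrors the diff: a containerized dev setup talks to the compose service name,
	// plain local dev talks to localhost, production always uses the service name.
	const isDev = process.env.NODE_ENV === 'development';
	const fluentBitUrl = isDev && process.env.COOLIFY_CONTAINER_DEV !== 'true'
		? 'http://localhost:24224'
		: 'http://coolify-fluentbit:24224';
	try {
		// Fluent Bit's HTTP input accepts a JSON body; the tag is encoded in the path.
		await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
			json: { line: encrypt(line) }
		});
	} catch {
		// Same idea as the diff's catch branch: keep the log even if shipping fails.
		await storeLocally(line);
	}
}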


@@ -0,0 +1,100 @@
import { promises as fs } from 'fs';
import { defaultComposeConfiguration, executeDockerCmd } from '../common';
import { buildImage, saveBuildLog } from './common';
import yaml from 'js-yaml';
export default async function (data) {
let {
applicationId,
debug,
buildId,
dockerId,
network,
volumes,
labels,
workdir,
baseDirectory,
secrets,
pullmergeRequestId,
port,
dockerComposeConfiguration
} = data
const fileYml = `${workdir}${baseDirectory}/docker-compose.yml`;
const fileYaml = `${workdir}${baseDirectory}/docker-compose.yaml`;
let dockerComposeRaw = null;
let isYml = false;
try {
dockerComposeRaw = await fs.readFile(`${fileYml}`, 'utf8')
isYml = true
} catch (error) { }
try {
dockerComposeRaw = await fs.readFile(`${fileYaml}`, 'utf8')
} catch (error) { }
if (!dockerComposeRaw) {
throw ('docker-compose.yml or docker-compose.yaml are not found!');
}
const dockerComposeYaml = yaml.load(dockerComposeRaw)
if (!dockerComposeYaml.services) {
throw 'No Services found in docker-compose file.'
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
let networks = {}
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
value['container_name'] = `${applicationId}-${key}`
value['env_file'] = envFound ? [`${workdir}/.env`] : []
value['labels'] = labels
value['volumes'] = volumes
if (dockerComposeConfiguration[key].port) {
value['expose'] = [dockerComposeConfiguration[key].port]
}
if (value['networks']?.length > 0) {
value['networks'].forEach((network) => {
networks[network] = {
name: network
}
})
}
value['networks'] = [...value['networks'] || '', network]
dockerComposeYaml.services[key] = { ...dockerComposeYaml.services[key], restart: defaultComposeConfiguration(network).restart, deploy: defaultComposeConfiguration(network).deploy }
}
dockerComposeYaml['volumes'] = Object.assign({ ...dockerComposeYaml['volumes'] }, ...composeVolumes)
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } })
await fs.writeFile(`${workdir}/docker-compose.${isYml ? 'yml' : 'yaml'}`, yaml.dump(dockerComposeYaml));
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} pull` })
await saveBuildLog({ line: 'Pulling images from Compose file.', buildId, applicationId });
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} build --progress plain` })
await saveBuildLog({ line: 'Building images from Compose file.', buildId, applicationId });
}
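
The same secret-resolution rule used in the compose pack above now appears in every build pack: on preview (PR/MR) deployments the PR/MR-scoped value of a secret wins, otherwise the regular value is used and PR/MR-only secrets are skipped. A hedged sketch of that rule as a standalone helper (the Secret shape is inferred from the diff; the helper name is made up):

interface Secret {
	name: string;
	value: string;
	isPRMRSecret: boolean;
	isBuildSecret?: boolean;
}

// Returns NAME=value pairs, preferring PR/MR-scoped values when a PR/MR id is present.
export function resolveSecretEnvs(secrets: Secret[], pullmergeRequestId?: string): string[] {
	const envs: string[] = [];
	for (const secret of secrets) {
		if (pullmergeRequestId) {
			const prmr = secrets.find((s) => s.name === secret.name && s.isPRMRSecret);
			envs.push(`${secret.name}=${prmr ? prmr.value : secret.value}`);
		} else if (!secret.isPRMRSecret) {
			envs.push(`${secret.name}=${secret.value}`);
		}
	}
	return envs;
}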


@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {
@@ -46,7 +49,7 @@ const createDockerfile = async (data, image): Promise<void> => {
 Dockerfile.push(`RUN deno cache ${denoMainFile}`);
 Dockerfile.push(`ENV NO_COLOR true`);
 Dockerfile.push(`EXPOSE ${port}`);
-Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
+Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
 await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
 };


@@ -13,40 +13,33 @@ export default async function (data) {
 pullmergeRequestId,
 dockerFileLocation
 } = data
-try {
-const file = `${workdir}${dockerFileLocation}`;
-let dockerFileOut = `${workdir}`;
-if (baseDirectory) {
-dockerFileOut = `${workdir}${baseDirectory}`;
-workdir = `${workdir}${baseDirectory}`;
-}
-const Dockerfile: Array<string> = (await fs.readFile(`${file}`, 'utf8'))
+const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
+data.workdir = `${workdir}${baseDirectory}`;
+const DockerfileRaw = await fs.readFile(`${file}`, 'utf8')
+const Dockerfile: Array<string> = DockerfileRaw
 .toString()
 .trim()
 .split('\n');
 Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
 if (secrets.length > 0) {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (
 (pullmergeRequestId && secret.isPRMRSecret) ||
 (!pullmergeRequestId && !secret.isPRMRSecret)
 ) {
 Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
 Dockerfile.forEach((line, index) => {
 if (line.startsWith('FROM')) {
 Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
 }
 });
 }
 }
 });
 }
-await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n'));
-await buildImage(data);
-} catch (error) {
-throw error;
-}
+await fs.writeFile(`${workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
+await buildImage(data);
 }
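
A small sketch of the ARG-injection idea used above: a build secret is declared at the top of the Dockerfile with its value and re-declared (value-less) after each FROM so later stages can still see it. This is a hypothetical helper following the same idea as the diff, which performs the splice inline:

// Injects `ARG name=value` before the first line and `ARG name` after every FROM.
export function injectBuildArg(dockerfileLines: string[], name: string, value: string): string[] {
	const lines = [`ARG ${name}=${value}`, ...dockerfileLines];
	for (let i = lines.length - 1; i >= 0; i--) {
		// Walk backwards so splicing does not shift indexes we still need to visit.
		if (lines[i].startsWith('FROM')) {
			lines.splice(i + 1, 0, `ARG ${name}`);
		}
	}
	return lines;
}

// Example: injectBuildArg(['FROM node:18', 'RUN npm ci'], 'NPM_TOKEN', 'xyz')
// -> ['ARG NPM_TOKEN=xyz', 'FROM node:18', 'ARG NPM_TOKEN', 'RUN npm ci']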


@@ -2,38 +2,16 @@ import { executeDockerCmd, prisma } from "../common"
 import { saveBuildLog } from "./common";
 export default async function (data: any): Promise<void> {
-const { buildId, applicationId, tag, dockerId, debug, workdir } = data
+const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory } = data
 try {
 await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
-const { stdout } = await executeDockerCmd({
+await executeDockerCmd({
+debug,
 dockerId,
-command: `pack build -p ${workdir} ${applicationId}:${tag} --builder heroku/buildpacks:20`
+command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder heroku/buildpacks:20`
 })
-if (debug) {
-const array = stdout.split('\n')
-for (const line of array) {
-if (line !== '\n') {
-await saveBuildLog({
-line: `${line.replace('\n', '')}`,
-buildId,
-applicationId
-});
-}
-}
-}
 await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
 } catch (error) {
-const array = error.stdout.split('\n')
-for (const line of array) {
-if (line !== '\n') {
-await saveBuildLog({
-line: `${line.replace('\n', '')}`,
-buildId,
-applicationId
-});
-}
-}
 throw error;
 }
 }
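
For reference, a tiny sketch of what the buildpack invocation above expands to when a base directory is configured; the paths and ids are illustrative only:

const workdir = '/tmp/build/abc123';   // illustrative
const baseDirectory = '/apps/api';     // illustrative
const applicationId = 'abc123';
const tag = 'commit-sha';

// `pack` builds the image directly from the sources under workdir + baseDirectory.
const command = `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder heroku/buildpacks:20`;
console.log(command);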


@@ -16,6 +16,7 @@ import python from './python';
 import deno from './deno';
 import laravel from './laravel';
 import heroku from './heroku';
+import compose from './compose'
 export {
 node,
@@ -35,5 +36,6 @@ export {
 python,
 deno,
 laravel,
-heroku
+heroku,
+compose
 };


@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {


@@ -23,7 +23,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {


@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {


@@ -16,7 +16,10 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {


@@ -21,7 +21,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {


@@ -24,7 +24,10 @@ const createDockerfile = async (data, image): Promise<void> => {
 secrets.forEach((secret) => {
 if (secret.isBuildSecret) {
 if (pullmergeRequestId) {
-if (secret.isPRMRSecret) {
+const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+if (isSecretFound.length > 0) {
+Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+} else {
 Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
 }
 } else {

File diff suppressed because it is too large.


@@ -13,7 +13,7 @@ export function formatLabelsOnDocker(data) {
 return container
 })
 }
-export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<boolean> {
+export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
 let containerFound = false;
 try {
 const { stdout } = await executeDockerCmd({
@@ -21,10 +21,12 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 command:
 `docker inspect --format '{{json .State}}' ${container}`
 });
+containerFound = true
 const parsedStdout = JSON.parse(stdout);
 const status = parsedStdout.Status;
 const isRunning = status === 'running';
+const isRestarting = status === 'restarting'
+const isExited = status === 'exited'
 if (status === 'created') {
 await executeDockerCmd({
 dockerId,
@@ -39,13 +41,23 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 `docker rm ${container}`
 });
 }
-if (isRunning) {
-containerFound = true;
-}
+return {
+found: containerFound,
+status: {
+isRunning,
+isRestarting,
+isExited
+}
+};
 } catch (err) {
 // Container not found
 }
-return containerFound;
+return {
+found: false
+};
 }
 export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
@@ -75,6 +87,9 @@ export async function removeContainer({
 await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
 await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
 }
+if (JSON.parse(stdout).Status === 'exited') {
+await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
+}
 } catch (error) {
 throw error;
 }
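
A short sketch of how a caller might consume the new checkContainer return shape above; the import path is an assumption:

// Assumed import path; the function is the one redefined in the diff above.
import { checkContainer } from './lib/docker';

async function shouldRestart(dockerId: string, container: string): Promise<boolean> {
	const { found, status } = await checkContainer({ dockerId, container });
	// A missing container, or one stuck in `exited`/`restarting`, both warrant a (re)start.
	if (!found) return true;
	return Boolean(status?.isExited || status?.isRestarting);
}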


@@ -73,6 +73,4 @@ export default async function ({
 const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
 return commit.replace('\n', '');
 }


@@ -10,7 +10,8 @@ export default async function ({
 branch,
 buildId,
 privateSshKey,
-customPort
+customPort,
+forPublic
 }: {
 applicationId: string;
 workdir: string;
@@ -21,11 +22,15 @@ export default async function ({
 repodir: string;
 privateSshKey: string;
 customPort: number;
+forPublic: boolean;
 }): Promise<string> {
 const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
 await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
-await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
-await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+if (!forPublic) {
+await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
+await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+}
 await saveBuildLog({
 line: `Cloning ${repository}:${branch} branch.`,
@@ -33,9 +38,16 @@ export default async function ({
 applicationId
 });
-await asyncExecShell(
-`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
-);
+if (forPublic) {
+await asyncExecShell(
+`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+);
+} else {
+await asyncExecShell(
+`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+);
+}
 const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
 return commit.replace('\n', '');
 }
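
A condensed sketch of the clone-command selection introduced above: public GitLab sources clone over HTTPS, private ones keep the deploy key and custom SSH port. The helper name and the simplified argument list (no submodule/LFS steps) are assumptions:

function buildCloneCommand(opts: {
	forPublic: boolean;
	branch: string;
	url: string;        // host/path without protocol, e.g. gitlab.com
	repository: string; // group/project
	workdir: string;
	repodir: string;
	customPort: number;
}): string {
	const { forPublic, branch, url, repository, workdir, repodir, customPort } = opts;
	if (forPublic) {
		// No deploy key needed: plain HTTPS clone.
		return `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/`;
	}
	// Private repository: per-application deploy key plus custom SSH port.
	return `git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/`;
}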


@@ -9,17 +9,16 @@ Bree.extend(TSBree);
 const options: any = {
 defaultExtension: 'js',
-// logger: new Cabin(),
-logger: false,
-workerMessageHandler: async ({ name, message }) => {
-if (name === 'deployApplication' && message?.deploying) {
-if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
-scheduler.workers.get('deployApplication').postMessage('cancel')
-}
-}
-},
+logger: new Cabin(),
+// logger: false,
+// workerMessageHandler: async ({ name, message }) => {
+// if (name === 'deployApplication' && message?.deploying) {
+// if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
+// scheduler.workers.get('deployApplication').postMessage('cancel')
+// }
+// }
+// },
 jobs: [
-{ name: 'infrastructure' },
 { name: 'deployApplication' },
 ],
 };


@@ -20,7 +20,7 @@ export const includeServices: any = {
 glitchTip: true,
 searxng: true,
 weblate: true,
-taiga: true
+taiga: true,
 };
 export async function configureServiceType({
 id,
@@ -378,6 +378,6 @@ export async function removeService({ id }: { id: string }): Promise<void> {
 await prisma.searxng.deleteMany({ where: { serviceId: id } });
 await prisma.weblate.deleteMany({ where: { serviceId: id } });
 await prisma.taiga.deleteMany({ where: { serviceId: id } });
 await prisma.service.delete({ where: { id } });
 }


@@ -5,6 +5,7 @@ import bcrypt from 'bcryptjs';
 import { ServiceStartStop } from '../../routes/api/v1/services/types';
 import { asyncSleep, ComposeFile, createDirectories, defaultComposeConfiguration, errorHandler, executeDockerCmd, getDomain, getFreePublicPort, getServiceFromDB, getServiceImage, getServiceMainPort, isARM, isDev, makeLabelForServices, persistentVolumes, prisma } from '../common';
 import { defaultServiceConfigurations } from '../services';
+import { OnlyId } from '../../types';
 export async function startService(request: FastifyRequest<ServiceStartStop>) {
 try {
@@ -69,6 +70,13 @@ export async function startService(request: FastifyRequest<ServiceStartStop>) {
 if (type === 'taiga') {
 return await startTaigaService(request)
 }
+if (type === 'grafana') {
+return await startGrafanaService(request)
+}
+if (type === 'trilium') {
+return await startTriliumService(request)
+}
 throw `Service type ${type} not supported.`
 } catch (error) {
 throw { status: 500, message: error?.message || error }
@@ -314,15 +322,15 @@ async function startMinioService(request: FastifyRequest<ServiceStartStop>) {
 destinationDocker,
 persistentStorage,
 exposePort,
-minio: { rootUser, rootUserPassword },
+minio: { rootUser, rootUserPassword, apiFqdn },
 serviceSecret
 } = service;
 const network = destinationDockerId && destinationDocker.network;
 const port = getServiceMainPort('minio');
-const { service: { destinationDocker: { id: dockerId } } } = await prisma.minio.findUnique({ where: { serviceId: id }, include: { service: { include: { destinationDocker: true } } } })
-const publicPort = await getFreePublicPort(id, dockerId);
+const { service: { destinationDocker: { remoteEngine, engine, remoteIpAddress } } } = await prisma.minio.findUnique({ where: { serviceId: id }, include: { service: { include: { destinationDocker: true } } } })
+const publicPort = await getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress });
 const consolePort = 9001;
 const { workdir } = await createDirectories({ repository: type, buildId: id });
@@ -333,7 +341,7 @@ async function startMinioService(request: FastifyRequest<ServiceStartStop>) {
 image: `${image}:${version}`,
 volumes: [`${id}-minio-data:/data`],
 environmentVariables: {
-MINIO_SERVER_URL: fqdn,
+MINIO_SERVER_URL: apiFqdn,
 MINIO_DOMAIN: getDomain(fqdn),
 MINIO_ROOT_USER: rootUser,
 MINIO_ROOT_PASSWORD: rootUserPassword,
@@ -655,7 +663,7 @@ async function startLanguageToolService(request: FastifyRequest<ServiceStartStop
 image: config.languagetool.image,
 environment: config.languagetool.environmentVariables,
 ...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
-volumes: config.languagetool,
+volumes: config.languagetool.volumes,
 labels: makeLabelForServices('languagetool'),
 ...defaultComposeConfiguration(network),
 }
@@ -710,7 +718,7 @@ async function startN8nService(request: FastifyRequest<ServiceStartStop>) {
 [id]: {
 container_name: id,
 image: config.n8n.image,
-volumes: config.n8n,
+volumes: config.n8n.volumes,
 environment: config.n8n.environmentVariables,
 labels: makeLabelForServices('n8n'),
 ...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
@@ -900,8 +908,8 @@ async function startMeilisearchService(request: FastifyRequest<ServiceStartStop>
 const {
 meiliSearch: { masterKey }
 } = service;
-const { type, version, destinationDockerId, destinationDocker, serviceSecret, exposePort, persistentStorage } =
-service;
+const { type, version, destinationDockerId, destinationDocker,
+serviceSecret, exposePort, persistentStorage } = service;
 const network = destinationDockerId && destinationDocker.network;
 const port = getServiceMainPort('meilisearch');
@@ -911,7 +919,7 @@ async function startMeilisearchService(request: FastifyRequest<ServiceStartStop>
 const config = {
 meilisearch: {
 image: `${image}:${version}`,
-volumes: [`${id}-datams:/data.ms`],
+volumes: [`${id}-datams:/meili_data/data.ms`, `${id}-data:/meili_data `],
 environmentVariables: {
 MEILI_MASTER_KEY: masterKey
 }
@@ -1006,79 +1014,136 @@ async function startUmamiService(request: FastifyRequest<ServiceStartStop>) {
 }
 const initDbSQL = `
-drop table if exists event;
-drop table if exists pageview;
-drop table if exists session;
-drop table if exists website;
-drop table if exists account;
-create table account (
-user_id serial primary key,
-username varchar(255) unique not null,
-password varchar(60) not null,
-is_admin bool not null default false,
-created_at timestamp with time zone default current_timestamp,
-updated_at timestamp with time zone default current_timestamp
-);
-create table website (
-website_id serial primary key,
-website_uuid uuid unique not null,
-user_id int not null references account(user_id) on delete cascade,
-name varchar(100) not null,
-domain varchar(500),
-share_id varchar(64) unique,
-created_at timestamp with time zone default current_timestamp
-);
-create table session (
-session_id serial primary key,
-session_uuid uuid unique not null,
-website_id int not null references website(website_id) on delete cascade,
-created_at timestamp with time zone default current_timestamp,
-hostname varchar(100),
-browser varchar(20),
-os varchar(20),
-device varchar(20),
-screen varchar(11),
-language varchar(35),
-country char(2)
-);
-create table pageview (
-view_id serial primary key,
-website_id int not null references website(website_id) on delete cascade,
-session_id int not null references session(session_id) on delete cascade,
-created_at timestamp with time zone default current_timestamp,
-url varchar(500) not null,
-referrer varchar(500)
-);
-create table event (
-event_id serial primary key,
-website_id int not null references website(website_id) on delete cascade,
-session_id int not null references session(session_id) on delete cascade,
-created_at timestamp with time zone default current_timestamp,
-url varchar(500) not null,
-event_type varchar(50) not null,
-event_value varchar(50) not null
-);
-create index website_user_id_idx on website(user_id);
-create index session_created_at_idx on session(created_at);
-create index session_website_id_idx on session(website_id);
-create index pageview_created_at_idx on pageview(created_at);
-create index pageview_website_id_idx on pageview(website_id);
-create index pageview_session_id_idx on pageview(session_id);
-create index pageview_website_id_created_at_idx on pageview(website_id, created_at);
-create index pageview_website_id_session_id_created_at_idx on pageview(website_id, session_id, created_at);
-create index event_created_at_idx on event(created_at);
-create index event_website_id_idx on event(website_id);
-create index event_session_id_idx on event(session_id);
+-- CreateTable
+CREATE TABLE "account" (
+"user_id" SERIAL NOT NULL,
+"username" VARCHAR(255) NOT NULL,
+"password" VARCHAR(60) NOT NULL,
+"is_admin" BOOLEAN NOT NULL DEFAULT false,
+"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+"updated_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+PRIMARY KEY ("user_id")
+);
+-- CreateTable
+CREATE TABLE "event" (
+"event_id" SERIAL NOT NULL,
+"website_id" INTEGER NOT NULL,
+"session_id" INTEGER NOT NULL,
+"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+"url" VARCHAR(500) NOT NULL,
+"event_type" VARCHAR(50) NOT NULL,
+"event_value" VARCHAR(50) NOT NULL,
+PRIMARY KEY ("event_id")
+);
+-- CreateTable
+CREATE TABLE "pageview" (
+"view_id" SERIAL NOT NULL,
+"website_id" INTEGER NOT NULL,
+"session_id" INTEGER NOT NULL,
+"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+"url" VARCHAR(500) NOT NULL,
+"referrer" VARCHAR(500),
+PRIMARY KEY ("view_id")
+);
+-- CreateTable
+CREATE TABLE "session" (
+"session_id" SERIAL NOT NULL,
+"session_uuid" UUID NOT NULL,
+"website_id" INTEGER NOT NULL,
+"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+"hostname" VARCHAR(100),
+"browser" VARCHAR(20),
+"os" VARCHAR(20),
+"device" VARCHAR(20),
+"screen" VARCHAR(11),
+"language" VARCHAR(35),
+"country" CHAR(2),
+PRIMARY KEY ("session_id")
+);
+-- CreateTable
+CREATE TABLE "website" (
+"website_id" SERIAL NOT NULL,
+"website_uuid" UUID NOT NULL,
+"user_id" INTEGER NOT NULL,
+"name" VARCHAR(100) NOT NULL,
+"domain" VARCHAR(500),
+"share_id" VARCHAR(64),
+"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+PRIMARY KEY ("website_id")
+);
+-- CreateIndex
+CREATE UNIQUE INDEX "account.username_unique" ON "account"("username");
+-- CreateIndex
+CREATE INDEX "event_created_at_idx" ON "event"("created_at");
+-- CreateIndex
+CREATE INDEX "event_session_id_idx" ON "event"("session_id");
+-- CreateIndex
+CREATE INDEX "event_website_id_idx" ON "event"("website_id");
+-- CreateIndex
+CREATE INDEX "pageview_created_at_idx" ON "pageview"("created_at");
+-- CreateIndex
+CREATE INDEX "pageview_session_id_idx" ON "pageview"("session_id");
+-- CreateIndex
+CREATE INDEX "pageview_website_id_created_at_idx" ON "pageview"("website_id", "created_at");
+-- CreateIndex
+CREATE INDEX "pageview_website_id_idx" ON "pageview"("website_id");
+-- CreateIndex
+CREATE INDEX "pageview_website_id_session_id_created_at_idx" ON "pageview"("website_id", "session_id", "created_at");
+-- CreateIndex
+CREATE UNIQUE INDEX "session.session_uuid_unique" ON "session"("session_uuid");
+-- CreateIndex
+CREATE INDEX "session_created_at_idx" ON "session"("created_at");
+-- CreateIndex
+CREATE INDEX "session_website_id_idx" ON "session"("website_id");
+-- CreateIndex
+CREATE UNIQUE INDEX "website.website_uuid_unique" ON "website"("website_uuid");
+-- CreateIndex
+CREATE UNIQUE INDEX "website.share_id_unique" ON "website"("share_id");
+-- CreateIndex
+CREATE INDEX "website_user_id_idx" ON "website"("user_id");
+-- AddForeignKey
+ALTER TABLE "event" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
+-- AddForeignKey
+ALTER TABLE "event" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+-- AddForeignKey
+ALTER TABLE "pageview" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
+-- AddForeignKey
+ALTER TABLE "pageview" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+-- AddForeignKey
+ALTER TABLE "session" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+-- AddForeignKey
+ALTER TABLE "website" ADD FOREIGN KEY ("user_id") REFERENCES "account"("user_id") ON DELETE CASCADE ON UPDATE CASCADE;
 insert into account (username, password, is_admin) values ('admin', '${bcrypt.hashSync(
 umamiAdminPassword,
 10
@@ -1116,7 +1181,6 @@ async function startUmamiService(request: FastifyRequest<ServiceStartStop>) {
}, },
volumes: volumeMounts volumes: volumeMounts
}; };
console.log(composeFile)
const composeFileDestination = `${workdir}/docker-compose.yaml`; const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
await startServiceContainers(destinationDocker.id, composeFileDestination) await startServiceContainers(destinationDocker.id, composeFileDestination)
@@ -1318,10 +1382,6 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
const teamId = request.user.teamId; const teamId = request.user.teamId;
const { version, fqdn, destinationDocker, secrets, exposePort, network, port, workdir, image, appwrite } = await defaultServiceConfigurations({ id, teamId }) const { version, fqdn, destinationDocker, secrets, exposePort, network, port, workdir, image, appwrite } = await defaultServiceConfigurations({ id, teamId })
let isStatsEnabled = false
if (secrets.find(s => s === '_APP_USAGE_STATS=enabled')) {
isStatsEnabled = true
}
const { const {
opensslKeyV1, opensslKeyV1,
executorSecret, executorSecret,
@@ -1350,6 +1410,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
depends_on: [ depends_on: [
`${id}-mariadb`, `${id}-mariadb`,
`${id}-redis`, `${id}-redis`,
`${id}-influxdb`,
], ],
environment: [ environment: [
"_APP_ENV=production", "_APP_ENV=production",
@@ -1370,6 +1431,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_EXECUTOR_HOST=http://${id}-executor/v1`, `_APP_EXECUTOR_HOST=http://${id}-executor/v1`,
`_APP_STATSD_HOST=${id}-telegraf`, `_APP_STATSD_HOST=${id}-telegraf`,
"_APP_STATSD_PORT=8125", "_APP_STATSD_PORT=8125",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1393,6 +1455,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_SCHEMA=${mariadbDatabase}`, `_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`, `_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1416,6 +1479,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_SCHEMA=${mariadbDatabase}`, `_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`, `_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1434,6 +1498,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`, `_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
`_APP_REDIS_HOST=${id}-redis`, `_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379", "_APP_REDIS_PORT=6379",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1467,6 +1532,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`_APP_EXECUTOR_SECRET=${executorSecret}`, `_APP_EXECUTOR_SECRET=${executorSecret}`,
`_APP_EXECUTOR_HOST=http://${id}-executor/v1`, `_APP_EXECUTOR_HOST=http://${id}-executor/v1`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1490,6 +1556,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_SCHEMA=${mariadbDatabase}`, `_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`, `_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1515,6 +1582,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_SCHEMA=${mariadbDatabase}`, `_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`, `_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1544,6 +1612,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_SCHEMA=${mariadbDatabase}`, `_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`, `_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1570,6 +1639,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`_APP_EXECUTOR_SECRET=${executorSecret}`, `_APP_EXECUTOR_SECRET=${executorSecret}`,
`_APP_EXECUTOR_HOST=http://${id}-executor/v1`, `_APP_EXECUTOR_HOST=http://${id}-executor/v1`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1594,6 +1664,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
environment: [ environment: [
"_APP_ENV=production", "_APP_ENV=production",
`_APP_EXECUTOR_SECRET=${executorSecret}`, `_APP_EXECUTOR_SECRET=${executorSecret}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1611,6 +1682,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`, `_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
`_APP_REDIS_HOST=${id}-redis`, `_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379", "_APP_REDIS_PORT=6379",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1627,6 +1699,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
"_APP_ENV=production", "_APP_ENV=production",
`_APP_REDIS_HOST=${id}-redis`, `_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379", "_APP_REDIS_PORT=6379",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1651,6 +1724,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`_APP_DB_SCHEMA=${mariadbDatabase}`, `_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`, `_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`, `_APP_DB_PASS=${mariadbPassword}`,
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1667,6 +1741,7 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
"_APP_ENV=production", "_APP_ENV=production",
`_APP_REDIS_HOST=${id}-redis`, `_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379", "_APP_REDIS_PORT=6379",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1683,7 +1758,8 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
`MYSQL_ROOT_PASSWORD=${mariadbRootUserPassword}`, `MYSQL_ROOT_PASSWORD=${mariadbRootUserPassword}`,
`MYSQL_USER=${mariadbUser}`, `MYSQL_USER=${mariadbUser}`,
`MYSQL_PASSWORD=${mariadbPassword}`, `MYSQL_PASSWORD=${mariadbPassword}`,
-`MYSQL_DATABASE=${mariadbDatabase}`
+`MYSQL_DATABASE=${mariadbDatabase}`,
+`OPEN_RUNTIMES_NETWORK=${network}`,
], ],
command: "mysqld --innodb-flush-method=fsync", command: "mysqld --innodb-flush-method=fsync",
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
@@ -1697,15 +1773,11 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
}, },
-};
-if (isStatsEnabled) {
-dockerCompose[id].depends_on.push(`${id}-influxdb`);
-dockerCompose[`${id}-usage`] = {
+[`${id}-usage-timeseries`]: {
 image: `${image}:${version}`,
 container_name: `${id}-usage`,
 labels: makeLabelForServices('appwrite'),
-entrypoint: "usage",
+entrypoint: "usage --type=timeseries",
depends_on: [ depends_on: [
`${id}-mariadb`, `${id}-mariadb`,
`${id}-influxdb`, `${id}-influxdb`,
@@ -1722,29 +1794,56 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
"_APP_INFLUXDB_PORT=8086", "_APP_INFLUXDB_PORT=8086",
`_APP_REDIS_HOST=${id}-redis`, `_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379", "_APP_REDIS_PORT=6379",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets ...secrets
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
-}
-dockerCompose[`${id}-influxdb`] = {
+},
+[`${id}-usage-database`]: {
image: `${image}:${version}`,
container_name: `${id}-usage-database`,
labels: makeLabelForServices('appwrite'),
entrypoint: "usage --type=database",
depends_on: [
`${id}-mariadb`,
`${id}-influxdb`,
],
environment: [
"_APP_ENV=production",
`_APP_OPENSSL_KEY_V1=${opensslKeyV1}`,
`_APP_DB_HOST=${mariadbHost}`,
`_APP_DB_PORT=${mariadbPort}`,
`_APP_DB_SCHEMA=${mariadbDatabase}`,
`_APP_DB_USER=${mariadbUser}`,
`_APP_DB_PASS=${mariadbPassword}`,
`_APP_INFLUXDB_HOST=${id}-influxdb`,
"_APP_INFLUXDB_PORT=8086",
`_APP_REDIS_HOST=${id}-redis`,
"_APP_REDIS_PORT=6379",
`OPEN_RUNTIMES_NETWORK=${network}`,
...secrets
],
...defaultComposeConfiguration(network),
},
[`${id}-influxdb`]: {
image: "appwrite/influxdb:1.5.0", image: "appwrite/influxdb:1.5.0",
container_name: `${id}-influxdb`, container_name: `${id}-influxdb`,
volumes: [ volumes: [
`${id}-influxdb:/var/lib/influxdb:rw` `${id}-influxdb:/var/lib/influxdb:rw`
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
-}
-dockerCompose[`${id}-telegraf`] = {
+},
+[`${id}-telegraf`]: {
image: "appwrite/telegraf:1.4.0", image: "appwrite/telegraf:1.4.0",
container_name: `${id}-telegraf`, container_name: `${id}-telegraf`,
environment: [ environment: [
`_APP_INFLUXDB_HOST=${id}-influxdb`, `_APP_INFLUXDB_HOST=${id}-influxdb`,
"_APP_INFLUXDB_PORT=8086", "_APP_INFLUXDB_PORT=8086",
`OPEN_RUNTIMES_NETWORK=${network}`,
], ],
...defaultComposeConfiguration(network), ...defaultComposeConfiguration(network),
} }
-}
+};
const composeFile: any = { const composeFile: any = {
version: '3.8', version: '3.8',
services: dockerCompose, services: dockerCompose,
@@ -1793,7 +1892,9 @@ async function startAppWriteService(request: FastifyRequest<ServiceStartStop>) {
} }
} }
 async function startServiceContainers(dockerId, composeFileDestination) {
-await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
+try {
+await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
+} catch (error) { }
 await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
 await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
 await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
@@ -1808,11 +1909,11 @@ async function stopServiceContainers(request: FastifyRequest<ServiceStartStop>)
 if (destinationDockerId) {
 await executeDockerCmd({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -n 1 docker stop -t 0`
+command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
 })
 await executeDockerCmd({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -n 1 docker rm --force`
+command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
 })
 return {}
 }
@@ -1979,8 +2080,8 @@ async function startGlitchTipService(request: FastifyRequest<ServiceStartStop>)
 EMAIL_PORT: emailSmtpPort,
 EMAIL_HOST_USER: emailSmtpUser,
 EMAIL_HOST_PASSWORD: emailSmtpPassword,
-EMAIL_USE_TLS: emailSmtpUseTls,
-EMAIL_USE_SSL: emailSmtpUseSsl,
+EMAIL_USE_TLS: emailSmtpUseTls ? 'True' : 'False',
+EMAIL_USE_SSL: emailSmtpUseSsl ? 'True' : 'False',
 EMAIL_BACKEND: emailBackend,
 MAILGUN_API_KEY: mailgunApiKey,
 SENDGRID_API_KEY: sendgridApiKey,
@@ -2590,3 +2691,132 @@ async function startTaigaService(request: FastifyRequest<ServiceStartStop>) {
} }
} }
async function startGrafanaService(request: FastifyRequest<ServiceStartStop>) {
try {
const { id } = request.params;
const teamId = request.user.teamId;
const service = await getServiceFromDB({ id, teamId });
const { type, version, destinationDockerId, destinationDocker, serviceSecret, exposePort, persistentStorage } =
service;
const network = destinationDockerId && destinationDocker.network;
const port = getServiceMainPort('grafana');
const { workdir } = await createDirectories({ repository: type, buildId: id });
const image = getServiceImage(type);
const config = {
grafana: {
image: `${image}:${version}`,
volumes: [`${id}-grafana:/var/lib/grafana`],
environmentVariables: {}
}
};
if (serviceSecret.length > 0) {
serviceSecret.forEach((secret) => {
config.grafana.environmentVariables[secret.name] = secret.value;
});
}
const { volumeMounts } = persistentVolumes(id, persistentStorage, config)
const composeFile: ComposeFile = {
version: '3.8',
services: {
[id]: {
container_name: id,
image: config.grafana.image,
volumes: config.grafana.volumes,
environment: config.grafana.environmentVariables,
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
labels: makeLabelForServices('grafana'),
...defaultComposeConfiguration(network),
}
},
networks: {
[network]: {
external: true
}
},
volumes: volumeMounts
};
const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
await startServiceContainers(destinationDocker.id, composeFileDestination)
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
async function startTriliumService(request: FastifyRequest<ServiceStartStop>) {
try {
const { id } = request.params;
const teamId = request.user.teamId;
const service = await getServiceFromDB({ id, teamId });
const { type, version, destinationDockerId, destinationDocker, serviceSecret, exposePort, persistentStorage } =
service;
const network = destinationDockerId && destinationDocker.network;
const port = getServiceMainPort('trilium');
const { workdir } = await createDirectories({ repository: type, buildId: id });
const image = getServiceImage(type);
const config = {
trilium: {
image: `${image}:${version}`,
volumes: [`${id}-trilium:/home/node/trilium-data`],
environmentVariables: {}
}
};
if (serviceSecret.length > 0) {
serviceSecret.forEach((secret) => {
config.trilium.environmentVariables[secret.name] = secret.value;
});
}
const { volumeMounts } = persistentVolumes(id, persistentStorage, config)
const composeFile: ComposeFile = {
version: '3.8',
services: {
[id]: {
container_name: id,
image: config.trilium.image,
volumes: config.trilium.volumes,
environment: config.trilium.environmentVariables,
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
labels: makeLabelForServices('trilium'),
...defaultComposeConfiguration(network),
}
},
networks: {
[network]: {
external: true
}
},
volumes: volumeMounts
};
const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
await startServiceContainers(destinationDocker.id, composeFileDestination)
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const { id } = request.params
const teamId = request.user.teamId;
const {
destinationDockerId,
destinationDocker,
} = await getServiceFromDB({ id, teamId });
if (destinationDockerId) {
await executeDockerCmd({
dockerId: destinationDocker.id,
command: `docker exec ${id} migrate`
})
return await reply.code(201).send()
}
throw { status: 500, message: 'Could cleanup logs.' }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
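
The Appwrite compose above repeats `OPEN_RUNTIMES_NETWORK=${network}` in every service's environment so that function runtimes spawned by Appwrite land on the same Docker network as the rest of the stack. A hedged sketch of the same idea expressed as a helper; the diff itself inlines the variable per service rather than using such a function:

type ComposeService = { environment?: string[]; [key: string]: unknown };

// Appends OPEN_RUNTIMES_NETWORK to every service definition in a compose services map.
function withOpenRuntimesNetwork(
	services: Record<string, ComposeService>,
	network: string
): Record<string, ComposeService> {
	return Object.fromEntries(
		Object.entries(services).map(([name, service]) => [
			name,
			{ ...service, environment: [...(service.environment ?? []), `OPEN_RUNTIMES_NETWORK=${network}`] }
		])
	);
}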


@@ -1,3 +1,24 @@
/*
Example of a supported version:
{
// Name used to identify the service internally
name: 'umami',
// Fancier name to show to the user
fancyName: 'Umami',
// Docker base image for the service
baseImage: 'ghcr.io/mikecao/umami',
// Optional: If there is any dependent image, you should list it here
images: [],
// Usable tags
versions: ['postgresql-latest'],
// Which tag is the recommended
recommendedVersion: 'postgresql-latest',
// Application's default port, Umami listens on 3000
ports: {
main: 3000
}
}
*/
export const supportedServiceTypesAndVersions = [
{ {
name: 'plausibleanalytics', name: 'plausibleanalytics',
@@ -8,7 +29,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'stable', recommendedVersion: 'stable',
ports: { ports: {
main: 8000 main: 8000
} },
labels: ['analytics', 'plausible', 'plausible-analytics', 'gdpr', 'no-cookie']
}, },
{ {
name: 'nocodb', name: 'nocodb',
@@ -18,7 +40,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 8080 main: 8080
} },
labels: ['nocodb', 'airtable', 'database']
}, },
{ {
name: 'minio', name: 'minio',
@@ -28,7 +51,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 9001 main: 9001
} },
labels: ['minio', 's3', 'storage']
}, },
{ {
name: 'vscodeserver', name: 'vscodeserver',
@@ -38,18 +62,20 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 8080 main: 8080
} },
labels: ['vscodeserver', 'vscode', 'code-server', 'ide']
}, },
{ {
name: 'wordpress', name: 'wordpress',
-fancyName: 'Wordpress',
+fancyName: 'WordPress',
baseImage: 'wordpress', baseImage: 'wordpress',
images: ['bitnami/mysql:5.7'], images: ['bitnami/mysql:5.7'],
versions: ['latest', 'php8.1', 'php8.0', 'php7.4', 'php7.3'], versions: ['latest', 'php8.1', 'php8.0', 'php7.4', 'php7.3'],
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 80 main: 80
} },
labels: ['wordpress', 'blog', 'cms']
}, },
{ {
name: 'vaultwarden', name: 'vaultwarden',
@@ -59,7 +85,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 80 main: 80
} },
labels: ['vaultwarden', 'password-manager', 'passwords']
}, },
{ {
name: 'languagetool', name: 'languagetool',
@@ -69,7 +96,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 8010 main: 8010
} },
labels: ['languagetool', 'grammar', 'spell-checker']
}, },
{ {
name: 'n8n', name: 'n8n',
@@ -79,7 +107,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 5678 main: 5678
} },
labels: ['n8n', 'workflow', 'automation', 'ifttt', 'zapier', 'nodered']
}, },
{ {
name: 'uptimekuma', name: 'uptimekuma',
@@ -89,7 +118,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 3001 main: 3001
} },
labels: ['uptimekuma', 'uptime', 'monitoring']
}, },
{ {
name: 'ghost', name: 'ghost',
@@ -100,7 +130,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 2368 main: 2368
} },
labels: ['ghost', 'blog', 'cms']
}, },
{ {
name: 'meilisearch', name: 'meilisearch',
@@ -111,18 +142,20 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 7700 main: 7700
} },
labels: ['meilisearch', 'search', 'search-engine']
}, },
{ {
name: 'umami', name: 'umami',
fancyName: 'Umami', fancyName: 'Umami',
baseImage: 'ghcr.io/mikecao/umami', baseImage: 'ghcr.io/umami-software/umami',
images: ['postgres:12-alpine'], images: ['postgres:12-alpine'],
versions: ['postgresql-latest'], versions: ['postgresql-latest'],
recommendedVersion: 'postgresql-latest', recommendedVersion: 'postgresql-latest',
ports: { ports: {
main: 3000 main: 3000
} },
labels: ['umami', 'analytics', 'gdpr', 'no-cookie']
}, },
{ {
name: 'hasura', name: 'hasura',
@@ -133,7 +166,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'v2.10.0', recommendedVersion: 'v2.10.0',
ports: { ports: {
main: 8080 main: 8080
} },
labels: ['hasura', 'graphql', 'database']
}, },
{ {
name: 'fider', name: 'fider',
@@ -144,18 +178,20 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'stable', recommendedVersion: 'stable',
ports: { ports: {
main: 3000 main: 3000
} },
labels: ['fider', 'feedback', 'suggestions']
}, },
{ {
name: 'appwrite', name: 'appwrite',
fancyName: 'Appwrite', fancyName: 'Appwrite',
baseImage: 'appwrite/appwrite', baseImage: 'appwrite/appwrite',
images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'], images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'],
versions: ['latest', '0.15.3'], versions: ['latest', '1.0', '0.15.3'],
recommendedVersion: '0.15.3', recommendedVersion: '1.0',
ports: { ports: {
main: 80 main: 80
} },
labels: ['appwrite', 'database', 'storage', 'api', 'serverless']
}, },
// { // {
// name: 'moodle', // name: 'moodle',
@@ -177,7 +213,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 8000 main: 8000
} },
labels: ['glitchtip', 'error-reporting', 'error', 'sentry', 'bugsnag']
}, },
{ {
name: 'searxng', name: 'searxng',
@@ -188,7 +225,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 8080 main: 8080
} },
labels: ['searxng', 'search', 'search-engine']
}, },
{ {
name: 'weblate', name: 'weblate',
@@ -199,7 +237,8 @@ export const supportedServiceTypesAndVersions = [
recommendedVersion: 'latest', recommendedVersion: 'latest',
ports: { ports: {
main: 8080 main: 8080
} },
labels: ['weblate', 'translation', 'localization']
}, },
// { // {
// name: 'taiga', // name: 'taiga',
@@ -212,4 +251,28 @@ export const supportedServiceTypesAndVersions = [
// main: 80 // main: 80
// } // }
// }, // },
]; {
name: 'grafana',
fancyName: 'Grafana',
baseImage: 'grafana/grafana',
images: [],
versions: ['latest', '9.1.3', '9.1.2', '9.0.8', '8.3.11', '8.4.11', '8.5.11'],
recommendedVersion: 'latest',
ports: {
main: 3000
},
labels: ['grafana', 'monitoring', 'metrics', 'dashboard']
},
{
name: 'trilium',
fancyName: 'Trilium Notes',
baseImage: 'zadam/trilium',
images: [],
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8080
},
labels: ['trilium', 'notes', 'note-taking', 'wiki']
},
];
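The new labels arrays give every service type a set of searchable keywords alongside name and fancyName. A minimal sketch of how a search term could be matched against this list — the helper name findServiceTypesByTerm is illustrative and not part of this diff:

// Hypothetical helper: match a free-text term against name, fancyName and labels.
export function findServiceTypesByTerm(term: string) {
    const needle = term.toLowerCase().trim();
    if (!needle) return supportedServiceTypesAndVersions;
    return supportedServiceTypesAndVersions.filter((service) =>
        [service.name, service.fancyName, ...(service.labels || [])].some((entry) =>
            entry.toLowerCase().includes(needle)
        )
    );
}

Searching for "zapier" or "nodered" would, for example, surface n8n even though neither word appears in its name.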

View File

@@ -5,6 +5,7 @@ import axios from 'axios';
import { FastifyReply } from 'fastify'; import { FastifyReply } from 'fastify';
import fs from 'fs/promises'; import fs from 'fs/promises';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import csv from 'csvtojson';
import { day } from '../../../../lib/dayjs'; import { day } from '../../../../lib/dayjs';
import { makeLabelForStandaloneApplication, setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common'; import { makeLabelForStandaloneApplication, setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
@@ -12,7 +13,7 @@ import { checkDomainsIsValidInDNS, checkDoubleBranch, checkExposedPort, createDi
import { checkContainer, formatLabelsOnDocker, isContainerExited, removeContainer } from '../../../../lib/docker'; import { checkContainer, formatLabelsOnDocker, isContainerExited, removeContainer } from '../../../../lib/docker';
import type { FastifyRequest } from 'fastify'; import type { FastifyRequest } from 'fastify';
import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication } from './types'; import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds } from './types';
import { OnlyId } from '../../../../types'; import { OnlyId } from '../../../../types';
function filterObject(obj, callback) { function filterObject(obj, callback) {
@@ -68,22 +69,105 @@ export async function getImages(request: FastifyRequest<GetImages>) {
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
} }
export async function cleanupUnconfiguredApplications(request: FastifyRequest<any>) {
try {
const teamId = request.user.teamId
let applications = await prisma.application.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true },
});
for (const application of applications) {
if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn)) {
if (application?.destinationDockerId && application.destinationDocker?.network) {
const { stdout: containers } = await executeDockerCmd({
dockerId: application.destinationDocker.id,
command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${application.id} --format '{{json .}}'`
})
if (containers) {
const containersArray = containers.trim().split('\n');
for (const container of containersArray) {
const containerObj = JSON.parse(container);
const id = containerObj.ID;
await removeContainer({ id, dockerId: application.destinationDocker.id });
}
}
}
await prisma.applicationSettings.deleteMany({ where: { applicationId: application.id } });
await prisma.buildLog.deleteMany({ where: { applicationId: application.id } });
await prisma.build.deleteMany({ where: { applicationId: application.id } });
await prisma.secret.deleteMany({ where: { applicationId: application.id } });
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: application.id } });
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: application.id } });
await prisma.application.deleteMany({ where: { id: application.id } });
}
}
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
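The cleanup above treats an application as unconfigured when it has no build pack, no destination, no branch, or neither an FQDN nor bot mode. The rule pulled out on its own (a hypothetical helper, shown only to document the condition used above):

// Hypothetical extraction of the rule used by cleanupUnconfiguredApplications above.
function isUnconfiguredApplication(application: any): boolean {
    return (
        !application.buildPack ||
        !application.destinationDockerId ||
        !application.branch ||
        (!application.settings?.isBot && !application?.fqdn)
    );
}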
export async function getApplicationStatus(request: FastifyRequest<OnlyId>) { export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
try { try {
const { id } = request.params const { id } = request.params
const { teamId } = request.user const { teamId } = request.user
let isRunning = false; let payload = []
let isExited = false;
const application: any = await getApplicationFromDB(id, teamId); const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) { if (application?.destinationDockerId) {
isRunning = await checkContainer({ dockerId: application.destinationDocker.id, container: id }); if (application.buildPack === 'compose') {
isExited = await isContainerExited(application.destinationDocker.id, id); const { stdout: containers } = await executeDockerCmd({
dockerId: application.destinationDocker.id,
command:
`docker ps -a --filter "label=coolify.applicationId=${id}" --format '{{json .}}'`
});
const containersArray = containers.trim().split('\n');
if (containersArray.length > 0 && containersArray[0] !== '') {
for (const container of containersArray) {
let isRunning = false;
let isExited = false;
let isRestarting = false;
const containerObj = JSON.parse(container);
const status = containerObj.State
if (status === 'running') {
isRunning = true;
}
if (status === 'exited') {
isExited = true;
}
if (status === 'restarting') {
isRestarting = true;
}
payload.push({
name: containerObj.Names,
status: {
isRunning,
isExited,
isRestarting
}
})
}
}
} else {
let isRunning = false;
let isExited = false;
let isRestarting = false;
const status = await checkContainer({ dockerId: application.destinationDocker.id, container: id });
if (status?.found) {
isRunning = status.status.isRunning;
isExited = status.status.isExited;
isRestarting = status.status.isRestarting
payload.push({
name: id,
status: {
isRunning,
isExited,
isRestarting
}
})
}
}
} }
return { return payload
isRunning,
isExited,
};
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
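For compose applications the status is now built per container from docker ps --format '{{json .}}' output, one JSON document per line. A sketch of that parsing step in isolation, mirroring the fields used above:

type ContainerStatus = {
    name: string;
    status: { isRunning: boolean; isExited: boolean; isRestarting: boolean };
};

// Hypothetical helper mirroring the loop above: one JSON object per line of `docker ps` output.
function parseContainerStates(stdout: string): ContainerStatus[] {
    return stdout
        .trim()
        .split('\n')
        .filter((line) => line !== '')
        .map((line) => {
            const container = JSON.parse(line);
            return {
                name: container.Names,
                status: {
                    isRunning: container.State === 'running',
                    isExited: container.State === 'exited',
                    isRestarting: container.State === 'restarting'
                }
            };
        });
}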
@@ -157,7 +241,8 @@ export async function getApplicationFromDB(id: string, teamId: string) {
gitSource: { include: { githubApp: true, gitlabApp: true } }, gitSource: { include: { githubApp: true, gitlabApp: true } },
secrets: true, secrets: true,
persistentStorage: true, persistentStorage: true,
connectedDatabase: true connectedDatabase: true,
previewApplication: true
} }
}); });
if (!application) { if (!application) {
@@ -245,15 +330,17 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
baseImage, baseImage,
baseBuildImage, baseBuildImage,
deploymentType, deploymentType,
baseDatabaseBranch baseDatabaseBranch,
dockerComposeFile,
dockerComposeFileLocation,
dockerComposeConfiguration
} = request.body } = request.body
if (port) port = Number(port); if (port) port = Number(port);
if (exposePort) { if (exposePort) {
exposePort = Number(exposePort); exposePort = Number(exposePort);
} }
const { destinationDocker: { id: dockerId, remoteIpAddress }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } }) const { destinationDocker: { engine, remoteEngine, remoteIpAddress }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } })
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, dockerId, remoteIpAddress }) if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress })
if (denoOptions) denoOptions = denoOptions.trim(); if (denoOptions) denoOptions = denoOptions.trim();
const defaultConfiguration = await setDefaultConfiguration({ const defaultConfiguration = await setDefaultConfiguration({
buildPack, buildPack,
@@ -280,6 +367,9 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
baseImage, baseImage,
baseBuildImage, baseBuildImage,
deploymentType, deploymentType,
dockerComposeFile,
dockerComposeFileLocation,
dockerComposeConfiguration,
...defaultConfiguration, ...defaultConfiguration,
connectedDatabase: { update: { hostedDatabaseDBName: baseDatabaseBranch } } connectedDatabase: { update: { hostedDatabaseDBName: baseDatabaseBranch } }
} }
@@ -298,6 +388,9 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
baseImage, baseImage,
baseBuildImage, baseBuildImage,
deploymentType, deploymentType,
dockerComposeFile,
dockerComposeFileLocation,
dockerComposeConfiguration,
...defaultConfiguration ...defaultConfiguration
} }
}); });
@@ -313,15 +406,10 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
export async function saveApplicationSettings(request: FastifyRequest<SaveApplicationSettings>, reply: FastifyReply) { export async function saveApplicationSettings(request: FastifyRequest<SaveApplicationSettings>, reply: FastifyReply) {
try { try {
const { id } = request.params const { id } = request.params
const { debug, previews, dualCerts, autodeploy, branch, projectId, isBot, isDBBranching } = request.body const { debug, previews, dualCerts, autodeploy, branch, projectId, isBot, isDBBranching, isCustomSSL } = request.body
// const isDouble = await checkDoubleBranch(branch, projectId);
// if (isDouble && autodeploy) {
// await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false } })
// throw { status: 500, message: 'Cannot activate automatic deployments until only one application is defined for this repository / branch.' }
// }
await prisma.application.update({ await prisma.application.update({
where: { id }, where: { id },
data: { fqdn: isBot ? null : undefined, settings: { update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching } } }, data: { fqdn: isBot ? null : undefined, settings: { update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching, isCustomSSL } } },
include: { destinationDocker: true } include: { destinationDocker: true }
}); });
return reply.code(201).send(); return reply.code(201).send();
@@ -339,10 +427,11 @@ export async function stopPreviewApplication(request: FastifyRequest<StopPreview
if (application?.destinationDockerId) { if (application?.destinationDockerId) {
const container = `${id}-${pullmergeRequestId}` const container = `${id}-${pullmergeRequestId}`
const { id: dockerId } = application.destinationDocker; const { id: dockerId } = application.destinationDocker;
const found = await checkContainer({ dockerId, container }); const { found } = await checkContainer({ dockerId, container });
if (found) { if (found) {
await removeContainer({ id: container, dockerId: application.destinationDocker.id }); await removeContainer({ id: container, dockerId: application.destinationDocker.id });
} }
await prisma.previewApplication.deleteMany({ where: { applicationId: application.id, pullmergeRequestId } })
} }
return reply.code(201).send(); return reply.code(201).send();
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -366,7 +455,10 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
if (secrets.length > 0) { if (secrets.length > 0) {
secrets.forEach((secret) => { secrets.forEach((secret) => {
if (pullmergeRequestId) { if (pullmergeRequestId) {
if (secret.isPRMRSecret) { const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`); envs.push(`${secret.name}=${secret.value}`);
} }
} else { } else {
@@ -463,7 +555,22 @@ export async function stopApplication(request: FastifyRequest<OnlyId>, reply: Fa
const application: any = await getApplicationFromDB(id, teamId); const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) { if (application?.destinationDockerId) {
const { id: dockerId } = application.destinationDocker; const { id: dockerId } = application.destinationDocker;
const found = await checkContainer({ dockerId, container: id }); if (application.buildPack === 'compose') {
const { stdout: containers } = await executeDockerCmd({
dockerId: application.destinationDocker.id,
command:
`docker ps -a --filter "label=coolify.applicationId=${id}" --format '{{json .}}'`
});
const containersArray = containers.trim().split('\n');
if (containersArray.length > 0 && containersArray[0] !== '') {
for (const container of containersArray) {
const containerObj = JSON.parse(container);
await removeContainer({ id: containerObj.ID, dockerId: application.destinationDocker.id });
}
}
return
}
const { found } = await checkContainer({ dockerId, container: id });
if (found) { if (found) {
await removeContainer({ id, dockerId: application.destinationDocker.id }); await removeContainer({ id, dockerId: application.destinationDocker.id });
} }
@@ -534,14 +641,14 @@ export async function checkDNS(request: FastifyRequest<CheckDNS>) {
} }
if (exposePort) exposePort = Number(exposePort); if (exposePort) exposePort = Number(exposePort);
const { destinationDocker: { id: dockerId, remoteIpAddress, remoteEngine }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } }) const { destinationDocker: { engine, remoteIpAddress, remoteEngine }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } })
const { isDNSCheckEnabled } = await prisma.setting.findFirst({}); const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
const found = await isDomainConfigured({ id, fqdn, remoteIpAddress }); const found = await isDomainConfigured({ id, fqdn, remoteIpAddress });
if (found) { if (found) {
throw { status: 500, message: `Domain ${getDomain(fqdn).replace('www.', '')} is already in use!` } throw { status: 500, message: `Domain ${getDomain(fqdn).replace('www.', '')} is already in use!` }
} }
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, dockerId, remoteIpAddress }) if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress })
if (isDNSCheckEnabled && !isDev && !forceSave) { if (isDNSCheckEnabled && !isDev && !forceSave) {
let hostname = request.hostname.split(':')[0]; let hostname = request.hostname.split(':')[0];
if (remoteEngine) hostname = remoteIpAddress; if (remoteEngine) hostname = remoteIpAddress;
@@ -570,6 +677,24 @@ export async function getUsage(request) {
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
} }
export async function getUsageByContainer(request) {
try {
const { id, containerId } = request.params
const teamId = request.user?.teamId;
let usage = {};
const application: any = await getApplicationFromDB(id, teamId);
if (application.destinationDockerId) {
[usage] = await Promise.all([getContainerUsage(application.destinationDocker.id, containerId)]);
}
return {
usage
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
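Together with the /:id/usage/:containerId route registered further down, per-container metrics can now be polled. A rough client-side sketch; the /api/v1 prefix and the bearer-token handling are assumptions, not something this diff shows:

// Assumed URL prefix; adjust to wherever the applications plugin is mounted.
async function fetchContainerUsage(applicationId: string, containerId: string, token: string) {
    const response = await fetch(`/api/v1/applications/${applicationId}/usage/${containerId}`, {
        headers: { Authorization: `Bearer ${token}` }
    });
    if (!response.ok) throw new Error(`Usage request failed: ${response.status}`);
    const { usage } = await response.json();
    return usage;
}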
export async function deployApplication(request: FastifyRequest<DeployApplication>) { export async function deployApplication(request: FastifyRequest<DeployApplication>) {
try { try {
const { id } = request.params const { id } = request.params
@@ -607,7 +732,7 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
githubAppId: application.gitSource?.githubApp?.id, githubAppId: application.gitSource?.githubApp?.id,
gitlabAppId: application.gitSource?.gitlabApp?.id, gitlabAppId: application.gitSource?.gitlabApp?.id,
status: 'queued', status: 'queued',
type: 'manual' type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? 'manual_pr' : 'manual_mr' : 'manual'
} }
}); });
return { return {
@@ -755,7 +880,10 @@ export async function saveBuildPack(request, reply) {
try { try {
const { id } = request.params const { id } = request.params
const { buildPack } = request.body const { buildPack } = request.body
await prisma.application.update({ where: { id }, data: { buildPack } }); const { baseImage, baseBuildImage } = setDefaultBaseImage(
buildPack
);
await prisma.application.update({ where: { id }, data: { buildPack, baseImage, baseBuildImage } });
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
@@ -775,55 +903,79 @@ export async function saveConnectedDatabase(request, reply) {
export async function getSecrets(request: FastifyRequest<OnlyId>) { export async function getSecrets(request: FastifyRequest<OnlyId>) {
try { try {
const { id } = request.params const { id } = request.params
let secrets = await prisma.secret.findMany({ let secrets = await prisma.secret.findMany({
where: { applicationId: id }, where: { applicationId: id, isPRMRSecret: false },
orderBy: { createdAt: 'desc' } orderBy: { createdAt: 'asc' }
}); });
let previewSecrets = await prisma.secret.findMany({
where: { applicationId: id, isPRMRSecret: true },
orderBy: { createdAt: 'asc' }
});
secrets = secrets.map((secret) => { secrets = secrets.map((secret) => {
secret.value = decrypt(secret.value); secret.value = decrypt(secret.value);
return secret; return secret;
}); });
secrets = secrets.filter((secret) => !secret.isPRMRSecret).sort((a, b) => { previewSecrets = previewSecrets.map((secret) => {
return ('' + a.name).localeCompare(b.name); secret.value = decrypt(secret.value);
}) return secret;
});
return { return {
secrets previewSecrets: previewSecrets.sort((a, b) => {
return ('' + a.name).localeCompare(b.name);
}),
secrets: secrets.sort((a, b) => {
return ('' + a.name).localeCompare(b.name);
})
} }
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
} }
export async function updatePreviewSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
try {
const { id } = request.params
let { name, value } = request.body
if (value) {
value = encrypt(value.trim())
} else {
value = ''
}
await prisma.secret.updateMany({
where: { applicationId: id, name, isPRMRSecret: true },
data: { value }
});
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function updateSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
try {
const { id } = request.params
const { name, value, isBuildSecret = undefined } = request.body
await prisma.secret.updateMany({
where: { applicationId: id, name },
data: { value: encrypt(value.trim()), isBuildSecret }
});
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) { export async function saveSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
try { try {
const { id } = request.params const { id } = request.params
let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body const { name, value, isBuildSecret = false } = request.body
await prisma.secret.create({
if (isNew) { data: { name, value: encrypt(value.trim()), isBuildSecret, isPRMRSecret: false, application: { connect: { id } } }
const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } }); });
if (found) { await prisma.secret.create({
throw { status: 500, message: `Secret ${name} already exists.` } data: { name, value: encrypt(value.trim()), isBuildSecret, isPRMRSecret: true, application: { connect: { id } } }
} else { });
value = encrypt(value.trim());
await prisma.secret.create({
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
});
}
} else {
value = encrypt(value.trim());
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
if (found) {
await prisma.secret.updateMany({
where: { applicationId: id, name, isPRMRSecret },
data: { value, isBuildSecret, isPRMRSecret }
});
} else {
await prisma.secret.create({
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
});
}
}
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
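saveSecret now always writes two rows per name (the regular secret and its PR/MR twin), while updateSecret and updatePreviewSecret edit them separately through the PUT routes registered below. A hedged client sketch of that flow; the /api/v1 prefix is an assumption:

// Create a secret (stores both the normal and the PR/MR copy), then override only the preview value.
async function createAndOverridePreviewSecret(id: string, token: string) {
    const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' };
    await fetch(`/api/v1/applications/${id}/secrets`, {
        method: 'POST',
        headers,
        body: JSON.stringify({ name: 'API_KEY', value: 'production-value', isBuildSecret: false })
    });
    await fetch(`/api/v1/applications/${id}/secrets/preview`, {
        method: 'PUT',
        headers,
        body: JSON.stringify({ name: 'API_KEY', value: 'preview-value' })
    });
}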
@@ -884,6 +1036,181 @@ export async function deleteStorage(request: FastifyRequest<DeleteStorage>) {
} }
} }
export async function restartPreview(request: FastifyRequest<RestartPreviewApplication>, reply: FastifyReply) {
try {
const { id, pullmergeRequestId } = request.params
const { teamId } = request.user
let application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) {
const buildId = cuid();
const { id: dockerId, network } = application.destinationDocker;
const { secrets, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
const { workdir } = await createDirectories({ repository, buildId });
const labels = []
let image = null
const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
const containersArray = container.trim().split('\n');
for (const container of containersArray) {
const containerObj = formatLabelsOnDocker(container);
image = containerObj[0].Image
Object.keys(containerObj[0].Labels).forEach(function (key) {
if (key.startsWith('coolify')) {
labels.push(`${key}=${containerObj[0].Labels[key]}`)
}
})
}
let imageFound = false;
try {
await executeDockerCmd({
dockerId,
command: `docker image inspect ${image}`
})
imageFound = true;
} catch (error) {
//
}
if (!imageFound) {
throw { status: 500, message: 'Image not found, cannot restart application.' }
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[`${applicationId}-${pullmergeRequestId}`]: {
image,
container_name: `${applicationId}-${pullmergeRequestId}`,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(network),
}
},
networks: {
[network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
await executeDockerCmd({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
return reply.code(201).send();
}
throw { status: 500, message: 'Application cannot be restarted.' }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getPreviewStatus(request: FastifyRequest<RestartPreviewApplication>) {
try {
const { id, pullmergeRequestId } = request.params
const { teamId } = request.user
let isRunning = false;
let isExited = false;
let isRestarting = false;
let isBuilding = false
const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId) {
const status = await checkContainer({ dockerId: application.destinationDocker.id, container: `${id}-${pullmergeRequestId}` });
if (status?.found) {
isRunning = status.status.isRunning;
isExited = status.status.isExited;
isRestarting = status.status.isRestarting
}
const building = await prisma.build.findMany({ where: { applicationId: id, pullmergeRequestId, status: { in: ['queued', 'running'] } } })
isBuilding = building.length > 0
}
return {
isBuilding,
isRunning,
isRestarting,
isExited,
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function loadPreviews(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
if (stdout === '') {
throw { status: 500, message: 'No previews found.' }
}
const containers = formatLabelsOnDocker(stdout).filter(container => container.Labels['coolify.configuration'] && container.Labels['coolify.type'] === 'standalone-application')
const jsonContainers = containers
.map((container) =>
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
)
.filter((container) => {
return container.pullmergeRequestId && container.applicationId === id;
});
for (const container of jsonContainers) {
const found = await prisma.previewApplication.findMany({ where: { applicationId: container.applicationId, pullmergeRequestId: container.pullmergeRequestId } })
if (found.length === 0) {
await prisma.previewApplication.create({
data: {
pullmergeRequestId: container.pullmergeRequestId,
sourceBranch: container.branch,
customDomain: container.fqdn,
application: { connect: { id: container.applicationId } }
}
})
}
}
return {
previews: await prisma.previewApplication.findMany({ where: { applicationId: id } })
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
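The three handlers above cover the preview lifecycle: loadPreviews discovers running preview containers and persists them as previewApplication rows, getPreviewStatus reports per-PR container and build state, and restartPreview recreates the container from its existing image with the PR/MR secrets applied. A hedged client sketch against the routes registered below; the /api/v1 prefix is an assumption:

async function refreshAndRestartPreview(id: string, pullmergeRequestId: string, token: string) {
    const headers = { Authorization: `Bearer ${token}` };
    // Discover running preview containers and persist them as previewApplication rows.
    await fetch(`/api/v1/applications/${id}/previews/load`, { method: 'POST', headers });
    // Poll the per-PR status (isRunning / isExited / isRestarting / isBuilding).
    const statusResponse = await fetch(
        `/api/v1/applications/${id}/previews/${pullmergeRequestId}/status`,
        { headers }
    );
    const status = await statusResponse.json();
    if (status.isExited) {
        // Recreate the preview container from its existing image.
        await fetch(`/api/v1/applications/${id}/previews/${pullmergeRequestId}/restart`, {
            method: 'POST',
            headers
        });
    }
    return status;
}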
export async function getPreviews(request: FastifyRequest<OnlyId>) { export async function getPreviews(request: FastifyRequest<OnlyId>) {
try { try {
const { id } = request.params const { id } = request.params
@@ -899,26 +1226,7 @@ export async function getPreviews(request: FastifyRequest<OnlyId>) {
const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret); const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret);
const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret); const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret);
const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
if (stdout === '') {
return {
containers: [],
applicationSecrets: [],
PRMRSecrets: []
}
}
const containers = formatLabelsOnDocker(stdout).filter(container => container.Labels['coolify.configuration'] && container.Labels['coolify.type'] === 'standalone-application')
const jsonContainers = containers
.map((container) =>
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
)
.filter((container) => {
return container.pullmergeRequestId && container.applicationId === id;
});
return { return {
containers: jsonContainers,
applicationSecrets: applicationSecrets.sort((a, b) => { applicationSecrets: applicationSecrets.sort((a, b) => {
return ('' + a.name).localeCompare(b.name); return ('' + a.name).localeCompare(b.name);
}), }),
@@ -933,7 +1241,7 @@ export async function getPreviews(request: FastifyRequest<OnlyId>) {
export async function getApplicationLogs(request: FastifyRequest<GetApplicationLogs>) { export async function getApplicationLogs(request: FastifyRequest<GetApplicationLogs>) {
try { try {
const { id } = request.params; const { id, containerId } = request.params;
let { since = 0 } = request.query let { since = 0 } = request.query
if (since !== 0) { if (since !== 0) {
since = day(since).unix(); since = day(since).unix();
@@ -944,10 +1252,8 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
}); });
if (destinationDockerId) { if (destinationDockerId) {
try { try {
// const found = await checkContainer({ dockerId, container: id })
// if (found) {
const { default: ansi } = await import('strip-ansi') const { default: ansi } = await import('strip-ansi')
const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` }) const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a); const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a); const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
const logs = stripLogsStderr.concat(stripLogsStdout) const logs = stripLogsStderr.concat(stripLogsStdout)
@@ -955,7 +1261,10 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
return { logs: sortedLogs } return { logs: sortedLogs }
// } // }
} catch (error) { } catch (error) {
const { statusCode } = error; const { statusCode, stderr } = error;
if (stderr.startsWith('Error: No such container')) {
return { logs: [], noContainer: true }
}
if (statusCode === 404) { if (statusCode === 404) {
return { return {
logs: [] logs: []
@@ -970,7 +1279,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
} }
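Application logs are now requested per container (GET /:id/logs/:containerId), and a missing container is reported via a noContainer flag instead of surfacing a Docker error. A small client sketch, again with an assumed URL prefix:

async function fetchContainerLogs(id: string, containerId: string, since: number, token: string) {
    const url = `/api/v1/applications/${id}/logs/${containerId}?since=${since}`;
    const response = await fetch(url, { headers: { Authorization: `Bearer ${token}` } });
    const { logs, noContainer } = await response.json();
    if (noContainer) {
        // The container is gone (for example a stopped compose service); show an empty log pane.
        return [];
    }
    return logs;
}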
export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) { export async function getBuilds(request: FastifyRequest<GetBuilds>) {
try { try {
const { id } = request.params const { id } = request.params
let { buildId, skip = 0 } = request.query let { buildId, skip = 0 } = request.query
@@ -987,17 +1296,15 @@ export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
builds = await prisma.build.findMany({ builds = await prisma.build.findMany({
where: { applicationId: id }, where: { applicationId: id },
orderBy: { createdAt: 'desc' }, orderBy: { createdAt: 'desc' },
take: 5, take: 5 + skip
skip
}); });
} }
builds = builds.map((build) => { builds = builds.map((build) => {
const updatedAt = day(build.updatedAt).utc(); if (build.status === 'running') {
build.took = updatedAt.diff(day(build.createdAt)) / 1000; build.elapsed = (day().utc().diff(day(build.createdAt)) / 1000).toFixed(0);
build.since = updatedAt.fromNow(); }
return build; return build
}); })
return { return {
builds, builds,
buildCount buildCount
@@ -1009,22 +1316,49 @@ export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
export async function getBuildIdLogs(request: FastifyRequest<GetBuildIdLogs>) { export async function getBuildIdLogs(request: FastifyRequest<GetBuildIdLogs>) {
try { try {
const { buildId } = request.params // TODO: Fluentbit could still hold the logs, so we need to check if the logs are done
const { buildId, id } = request.params
let { sequence = 0 } = request.query let { sequence = 0 } = request.query
if (typeof sequence !== 'number') { if (typeof sequence !== 'number') {
sequence = Number(sequence) sequence = Number(sequence)
} }
let logs = await prisma.buildLog.findMany({ let file = `/app/logs/${id}_buildlog_${buildId}.csv`
where: { buildId, time: { gt: sequence } }, if (isDev) {
orderBy: { time: 'asc' } file = `${process.cwd()}/../../logs/${id}_buildlog_${buildId}.csv`
}); }
const data = await prisma.build.findFirst({ where: { id: buildId } }); const data = await prisma.build.findFirst({ where: { id: buildId } });
const createdAt = day(data.createdAt).utc(); const createdAt = day(data.createdAt).utc();
try {
await fs.stat(file)
} catch (error) {
let logs = await prisma.buildLog.findMany({
where: { buildId, time: { gt: sequence } },
orderBy: { time: 'asc' }
});
const data = await prisma.build.findFirst({ where: { id: buildId } });
const createdAt = day(data.createdAt).utc();
return {
logs: logs.map(log => {
log.time = Number(log.time)
return log
}),
fromDb: true,
took: day().diff(createdAt) / 1000,
status: data?.status || 'queued'
}
}
let fileLogs = (await fs.readFile(file)).toString()
let decryptedLogs = await csv({ noheader: true }).fromString(fileLogs)
let logs = decryptedLogs.map(log => {
const parsed = {
time: log['field1'],
line: decrypt(log['field2'] + '","' + log['field3'])
}
return parsed
}).filter(log => log.time > sequence)
return { return {
logs: logs.map(log => { logs,
log.time = Number(log.time) fromDb: false,
return log
}),
took: day().diff(createdAt) / 1000, took: day().diff(createdAt) / 1000,
status: data?.status || 'queued' status: data?.status || 'queued'
} }
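The file-based branch above assumes each CSV row is time,"<encrypted line>"; because the encrypted payload can itself contain the "," sequence, csvtojson may split it into field2/field3, which is why the handler re-joins them before decrypting. A standalone sketch of that read path under the same assumption (decrypt is passed in rather than imported, to keep the sketch self-contained):

import fs from 'fs/promises';
import csv from 'csvtojson';

// Sketch of the CSV build-log read path used by getBuildIdLogs above.
async function readBuildLogFile(file: string, sequence: number, decrypt: (value: string) => string) {
    const raw = (await fs.readFile(file)).toString();
    const rows = await csv({ noheader: true }).fromString(raw);
    return rows
        .map((row) => ({
            // field1 is the timestamp column; coerce it for the numeric comparison below.
            time: Number(row['field1']),
            // Re-join the pieces that csvtojson split on the `","` inside the encrypted value.
            line: decrypt(row['field2'] + '","' + row['field3'])
        }))
        .filter((log) => log.time > sequence);
}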

View File

@@ -1,8 +1,8 @@
import { FastifyPluginAsync } from 'fastify'; import { FastifyPluginAsync } from 'fastify';
import { OnlyId } from '../../../../types'; import { OnlyId } from '../../../../types';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, restartApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers'; import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, GetImages, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types'; import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => { const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => { fastify.addHook('onRequest', async (request) => {
@@ -11,6 +11,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async (request) => await listApplications(request)); fastify.get('/', async (request) => await listApplications(request));
fastify.post<GetImages>('/images', async (request) => await getImages(request)); fastify.post<GetImages>('/images', async (request) => await getImages(request));
fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredApplications(request));
fastify.post('/new', async (request, reply) => await newApplication(request, reply)); fastify.post('/new', async (request, reply) => await newApplication(request, reply));
fastify.get<OnlyId>('/:id', async (request) => await getApplication(request)); fastify.get<OnlyId>('/:id', async (request) => await getApplication(request));
@@ -30,6 +32,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/:id/secrets', async (request) => await getSecrets(request)); fastify.get<OnlyId>('/:id/secrets', async (request) => await getSecrets(request));
fastify.post<SaveSecret>('/:id/secrets', async (request, reply) => await saveSecret(request, reply)); fastify.post<SaveSecret>('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
fastify.put<SaveSecret>('/:id/secrets', async (request, reply) => await updateSecret(request, reply));
fastify.put<SaveSecret>('/:id/secrets/preview', async (request, reply) => await updatePreviewSecret(request, reply));
fastify.delete<DeleteSecret>('/:id/secrets', async (request) => await deleteSecret(request)); fastify.delete<DeleteSecret>('/:id/secrets', async (request) => await deleteSecret(request));
fastify.get<OnlyId>('/:id/storages', async (request) => await getStorages(request)); fastify.get<OnlyId>('/:id/storages', async (request) => await getStorages(request));
@@ -37,12 +41,17 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.delete<DeleteStorage>('/:id/storages', async (request) => await deleteStorage(request)); fastify.delete<DeleteStorage>('/:id/storages', async (request) => await deleteStorage(request));
fastify.get<OnlyId>('/:id/previews', async (request) => await getPreviews(request)); fastify.get<OnlyId>('/:id/previews', async (request) => await getPreviews(request));
fastify.post<OnlyId>('/:id/previews/load', async (request) => await loadPreviews(request));
fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));
fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request)); // fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
fastify.get<GetBuildLogs>('/:id/logs/build', async (request) => await getBuildLogs(request)); fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request));
fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request)); fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
fastify.get('/:id/usage', async (request) => await getUsage(request)) fastify.get('/:id/usage', async (request) => await getUsage(request))
fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request))
fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request)) fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply)); fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));

View File

@@ -21,12 +21,15 @@ export interface SaveApplication extends OnlyId {
baseImage: string, baseImage: string,
baseBuildImage: string, baseBuildImage: string,
deploymentType: string, deploymentType: string,
baseDatabaseBranch: string baseDatabaseBranch: string,
dockerComposeFile: string,
dockerComposeFileLocation: string,
dockerComposeConfiguration: string
} }
} }
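With the extra fields above, a compose-based save request carries the compose file itself plus its location and per-service configuration. An abridged, illustrative body; the values and the exact shape of dockerComposeConfiguration (treated here as a serialized per-service settings object) are assumptions, not taken from this diff:

// Abridged example payload for SaveApplication when the build pack is 'compose'.
const saveComposeApplicationBody = {
    buildPack: 'compose',
    dockerComposeFile: 'services:\n  web:\n    build: .\n    ports:\n      - "3000"\n',
    dockerComposeFileLocation: '/docker-compose.yaml',
    // Assumed shape: one entry per compose service with its Coolify-specific settings.
    dockerComposeConfiguration: JSON.stringify({ web: { port: 3000 } })
};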
export interface SaveApplicationSettings extends OnlyId { export interface SaveApplicationSettings extends OnlyId {
Querystring: { domain: string; }; Querystring: { domain: string; };
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean }; Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean, isCustomSSL: boolean };
} }
export interface DeleteApplication extends OnlyId { export interface DeleteApplication extends OnlyId {
Querystring: { domain: string; }; Querystring: { domain: string; };
@@ -65,7 +68,7 @@ export interface SaveSecret extends OnlyId {
name: string, name: string,
value: string, value: string,
isBuildSecret: boolean, isBuildSecret: boolean,
isPRMRSecret: boolean, previewSecret: boolean,
isNew: boolean isNew: boolean
} }
} }
@@ -84,12 +87,16 @@ export interface DeleteStorage extends OnlyId {
path: string, path: string,
} }
} }
export interface GetApplicationLogs extends OnlyId { export interface GetApplicationLogs {
Params: {
id: string,
containerId: string
}
Querystring: { Querystring: {
since: number, since: number,
} }
} }
export interface GetBuildLogs extends OnlyId { export interface GetBuilds extends OnlyId {
Querystring: { Querystring: {
buildId: string buildId: string
skip: number, skip: number,
@@ -97,6 +104,7 @@ export interface GetBuildLogs extends OnlyId {
} }
export interface GetBuildIdLogs { export interface GetBuildIdLogs {
Params: { Params: {
id: string,
buildId: string buildId: string
}, },
Querystring: { Querystring: {
@@ -126,4 +134,10 @@ export interface StopPreviewApplication extends OnlyId {
Body: { Body: {
pullmergeRequestId: string | null, pullmergeRequestId: string | null,
} }
}
export interface RestartPreviewApplication {
Params: {
id: string,
pullmergeRequestId: string | null,
}
} }

View File

@@ -2,15 +2,17 @@ import { FastifyPluginAsync } from 'fastify';
import { errorHandler, listSettings, version } from '../../../../lib/common'; import { errorHandler, listSettings, version } from '../../../../lib/common';
const root: FastifyPluginAsync = async (fastify): Promise<void> => { const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async () => { fastify.get('/', async (request) => {
const teamId = request.user?.teamId;
const settings = await listSettings() const settings = await listSettings()
try { try {
return { return {
ipv4: settings.ipv4, ipv4: teamId ? settings.ipv4 : 'nope',
ipv6: settings.ipv6, ipv6: teamId ? settings.ipv6 : 'nope',
version, version,
whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true', whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON, whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
isRegistrationEnabled: settings.isRegistrationEnabled,
} }
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })

View File

@@ -3,11 +3,11 @@ import type { FastifyRequest } from 'fastify';
import { FastifyReply } from 'fastify'; import { FastifyReply } from 'fastify';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import fs from 'fs/promises'; import fs from 'fs/promises';
import { ComposeFile, createDirectories, decrypt, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common'; import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
import { day } from '../../../../lib/dayjs'; import { day } from '../../../../lib/dayjs';
import { GetDatabaseLogs, OnlyId, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSettings, SaveVersion } from '../../../../types'; import type { OnlyId } from '../../../../types';
import { DeleteDatabase, SaveDatabaseType } from './types'; import type { DeleteDatabase, DeleteDatabaseSecret, GetDatabaseLogs, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSecret, SaveDatabaseSettings, SaveDatabaseType, SaveVersion } from './types';
export async function listDatabases(request: FastifyRequest) { export async function listDatabases(request: FastifyRequest) {
try { try {
@@ -51,6 +51,30 @@ export async function newDatabase(request: FastifyRequest, reply: FastifyReply)
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
} }
export async function cleanupUnconfiguredDatabases(request: FastifyRequest) {
try {
const teamId = request.user.teamId;
let databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true },
});
for (const database of databases) {
if (!database?.version) {
const { id } = database;
if (database.destinationDockerId) {
const everStarted = await stopDatabaseContainer(database);
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
}
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
}
}
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) { export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
try { try {
const { id } = request.params; const { id } = request.params;
@@ -61,16 +85,18 @@ export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } }, where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true } include: { destinationDocker: true, settings: true }
}); });
const { destinationDockerId, destinationDocker } = database; if (database) {
if (destinationDockerId) { const { destinationDockerId, destinationDocker } = database;
try { if (destinationDockerId) {
const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` }) try {
const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
if (JSON.parse(stdout).Running) { if (JSON.parse(stdout).Running) {
isRunning = true; isRunning = true;
}
} catch (error) {
//
} }
} catch (error) {
//
} }
} }
return { return {
@@ -92,15 +118,14 @@ export async function getDatabase(request: FastifyRequest<OnlyId>) {
if (!database) { if (!database) {
throw { status: 404, message: 'Database not found.' } throw { status: 404, message: 'Database not found.' }
} }
const { arch } = await listSettings(); const settings = await listSettings();
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword); if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword); if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
const configuration = generateDatabaseConfiguration(database, arch); const configuration = generateDatabaseConfiguration(database, settings.arch);
const settings = await listSettings();
return { return {
privatePort: configuration?.privatePort, privatePort: configuration?.privatePort,
database, database,
versions: await getDatabaseVersions(database.type, arch), versions: await getDatabaseVersions(database.type, settings.arch),
settings settings
}; };
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -220,7 +245,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
const database = await prisma.database.findFirst({ const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } }, where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true } include: { destinationDocker: true, settings: true, databaseSecret: true }
}); });
const { arch } = await listSettings(); const { arch } = await listSettings();
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword); if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
@@ -230,7 +255,8 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
destinationDockerId, destinationDockerId,
destinationDocker, destinationDocker,
publicPort, publicPort,
settings: { isPublic } settings: { isPublic },
databaseSecret
} = database; } = database;
const { privatePort, command, environmentVariables, image, volume, ulimits } = const { privatePort, command, environmentVariables, image, volume, ulimits } =
generateDatabaseConfiguration(database, arch); generateDatabaseConfiguration(database, arch);
@@ -240,7 +266,11 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
const labels = await makeLabelForStandaloneDatabase({ id, image, volume }); const labels = await makeLabelForStandaloneDatabase({ id, image, volume });
const { workdir } = await createDirectories({ repository: type, buildId: id }); const { workdir } = await createDirectories({ repository: type, buildId: id });
if (databaseSecret.length > 0) {
databaseSecret.forEach((secret) => {
environmentVariables[secret.name] = decrypt(secret.value);
});
}
const composeFile: ComposeFile = { const composeFile: ComposeFile = {
version: '3.8', version: '3.8',
services: { services: {
@@ -248,20 +278,11 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
container_name: id, container_name: id,
image, image,
command, command,
networks: [network],
environment: environmentVariables, environment: environmentVariables,
volumes: [volume], volumes: [volume],
ulimits, ulimits,
labels, labels,
restart: 'always', ...defaultComposeConfiguration(network),
deploy: {
restart_policy: {
condition: 'on-failure',
delay: '5s',
max_attempts: 3,
window: '120s'
}
}
} }
}, },
networks: { networks: {
@@ -271,25 +292,16 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
}, },
volumes: { volumes: {
[volumeName]: { [volumeName]: {
external: true name: volumeName,
} }
} }
}; };
const composeFileDestination = `${workdir}/docker-compose.yaml`; const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile)); await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
try { await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker volume create ${volumeName}` }) if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
} catch (error) { } return {};
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
return {};
} catch (error) {
throw {
error
};
}
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }) return errorHandler({ status, message })
} }
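defaultComposeConfiguration(network) replaces the networks/restart/deploy block that used to be written inline for every service. Its actual return value is not part of this diff; a plausible shape, reconstructed only from the inline options it supersedes here, would be:

// Assumed shape of defaultComposeConfiguration, inferred from the inline settings it replaced above.
function defaultComposeConfigurationSketch(network: string) {
    return {
        networks: [network],
        restart: 'always',
        deploy: {
            restart_policy: {
                condition: 'on-failure',
                delay: '5s',
                max_attempts: 3,
                window: '120s'
            }
        }
    };
}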
@@ -376,6 +388,7 @@ export async function deleteDatabase(request: FastifyRequest<DeleteDatabase>) {
} }
} }
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } }); await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } }); await prisma.database.delete({ where: { id } });
return {} return {}
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -436,10 +449,10 @@ export async function saveDatabaseSettings(request: FastifyRequest<SaveDatabaseS
      let publicPort = null
-     const { destinationDocker: { id: dockerId } } = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } })
+     const { destinationDocker: { remoteEngine, engine, remoteIpAddress } } = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } })
      if (isPublic) {
-         publicPort = await getFreePublicPort(id, dockerId);
+         publicPort = await getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress });
      }
      await prisma.database.update({
          where: { id },
@@ -471,4 +484,69 @@ export async function saveDatabaseSettings(request: FastifyRequest<SaveDatabaseS
  } catch ({ status, message }) {
      return errorHandler({ status, message })
  }
  }
export async function getDatabaseSecrets(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
let secrets = await prisma.databaseSecret.findMany({
where: { databaseId: id },
orderBy: { createdAt: 'desc' }
});
secrets = secrets.map((secret) => {
secret.value = decrypt(secret.value);
return secret;
});
return {
secrets
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDatabaseSecret(request: FastifyRequest<SaveDatabaseSecret>, reply: FastifyReply) {
try {
const { id } = request.params
let { name, value, isNew } = request.body
if (isNew) {
const found = await prisma.databaseSecret.findFirst({ where: { name, databaseId: id } });
if (found) {
throw `Secret ${name} already exists.`
} else {
value = encrypt(value.trim());
await prisma.databaseSecret.create({
data: { name, value, database: { connect: { id } } }
});
}
} else {
value = encrypt(value.trim());
const found = await prisma.databaseSecret.findFirst({ where: { databaseId: id, name } });
if (found) {
await prisma.databaseSecret.updateMany({
where: { databaseId: id, name },
data: { value }
});
} else {
await prisma.databaseSecret.create({
data: { name, value, database: { connect: { id } } }
});
}
}
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function deleteDatabaseSecret(request: FastifyRequest<DeleteDatabaseSecret>) {
try {
const { id } = request.params
const { name } = request.body
await prisma.databaseSecret.deleteMany({ where: { databaseId: id, name } });
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
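Taken together with the startDatabase change earlier in this file, a database secret is stored encrypted and only decrypted into the container environment at start time. A minimal sketch of that round trip, assuming `encrypt`/`decrypt` are the symmetric helpers exported from lib/common:

import { encrypt, decrypt } from '../../../../lib/common';

// what saveDatabaseSecret persists (value is encrypted at rest)
const stored = { name: 'POSTGRES_PASSWORD', value: encrypt('s3cr3t'.trim()) };

// what startDatabase injects into the compose environment
const environmentVariables: Record<string, string> = {};
environmentVariables[stored.name] = decrypt(stored.value);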


@@ -1,8 +1,9 @@
  import { FastifyPluginAsync } from 'fastify';
- import { deleteDatabase, getDatabase, getDatabaseLogs, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
+ import { cleanupUnconfiguredDatabases, deleteDatabase, deleteDatabaseSecret, getDatabase, getDatabaseLogs, getDatabaseSecrets, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSecret, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
- import type { DeleteDatabase, GetDatabaseLogs, OnlyId, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSettings, SaveVersion } from '../../../../types';
+ import type { OnlyId } from '../../../../types';
- import type { SaveDatabaseType } from './types';
+ import type { DeleteDatabase, SaveDatabaseType, DeleteDatabaseSecret, GetDatabaseLogs, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSecret, SaveDatabaseSettings, SaveVersion } from './types';
  const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.addHook('onRequest', async (request) => {
@@ -11,6 +12,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.get('/', async (request) => await listDatabases(request));
      fastify.post('/new', async (request, reply) => await newDatabase(request, reply));
+     fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredDatabases(request));
      fastify.get<OnlyId>('/:id', async (request) => await getDatabase(request));
      fastify.post<SaveDatabase>('/:id', async (request, reply) => await saveDatabase(request, reply));
      fastify.delete<DeleteDatabase>('/:id', async (request) => await deleteDatabase(request));
@@ -19,6 +22,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.post<SaveDatabaseSettings>('/:id/settings', async (request) => await saveDatabaseSettings(request));
+     fastify.get<OnlyId>('/:id/secrets', async (request) => await getDatabaseSecrets(request));
+     fastify.post<SaveDatabaseSecret>('/:id/secrets', async (request, reply) => await saveDatabaseSecret(request, reply));
+     fastify.delete<DeleteDatabaseSecret>('/:id/secrets', async (request) => await deleteDatabaseSecret(request));
      fastify.get('/:id/configuration/type', async (request) => await getDatabaseTypes(request));
      fastify.post<SaveDatabaseType>('/:id/configuration/type', async (request, reply) => await saveDatabaseType(request, reply));


@@ -5,4 +5,51 @@ export interface SaveDatabaseType extends OnlyId {
  }
  export interface DeleteDatabase extends OnlyId {
      Body: { force: string }
  }
export interface SaveVersion extends OnlyId {
Body: {
version: string
}
}
export interface SaveDatabaseDestination extends OnlyId {
Body: {
destinationId: string
}
}
export interface GetDatabaseLogs extends OnlyId {
Querystring: {
since: number
}
}
export interface SaveDatabase extends OnlyId {
Body: {
name: string,
defaultDatabase: string,
dbUser: string,
dbUserPassword: string,
rootUser: string,
rootUserPassword: string,
version: string,
isRunning: boolean
}
}
export interface SaveDatabaseSettings extends OnlyId {
Body: {
isPublic: boolean,
appendOnly: boolean
}
}
export interface SaveDatabaseSecret extends OnlyId {
Body: {
name: string,
value: string,
isNew: string,
}
}
export interface DeleteDatabaseSecret extends OnlyId {
Body: {
name: string,
}
}
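For reference, example request bodies matching the new secret interfaces above (the final URL depends on where the databases plugin is mounted, so the paths are indicative only):

// POST .../databases/:id/secrets -> SaveDatabaseSecret; note that isNew is typed as a string, not a boolean
const saveSecretBody = { name: 'POSTGRES_PASSWORD', value: 's3cr3t', isNew: 'true' };

// DELETE .../databases/:id/secrets -> DeleteDatabaseSecret
const deleteSecretBody = { name: 'POSTGRES_PASSWORD' };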


@@ -4,7 +4,7 @@ import sshConfig from 'ssh-config'
  import fs from 'fs/promises'
  import os from 'os';
- import { asyncExecShell, createRemoteEngineConfiguration, decrypt, errorHandler, executeDockerCmd, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
+ import { asyncExecShell, createRemoteEngineConfiguration, decrypt, errorHandler, executeDockerCmd, executeSSHCmd, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
  import { checkContainer } from '../../../../lib/docker';
  import type { OnlyId } from '../../../../types';
@@ -202,25 +202,58 @@ export async function assignSSHKey(request: FastifyRequest) {
      return errorHandler({ status, message })
  }
  }
- export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
-     try {
-         const { id } = request.params;
-         await createRemoteEngineConfiguration(id);
-         const { remoteIpAddress, remoteUser, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
-         const host = `ssh://${remoteUser}@${remoteIpAddress}`
-         const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
-         if (!stdout) {
-             await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
-         }
-         const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
-         if (!coolifyNetwork) {
-             await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
-         }
-         if (isCoolifyProxyUsed) await startTraefikProxy(id);
-         await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
-         return reply.code(201).send()
+ export async function verifyRemoteDockerEngineFn(id: string) {
+     await createRemoteEngineConfiguration(id);
+     const { remoteIpAddress, remoteUser, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
+     const host = `ssh://${remoteUser}@${remoteIpAddress}`
+     const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
+     if (!stdout) {
+         await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
+     }
+     const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
+     if (!coolifyNetwork) {
+         await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
+     }
+     if (isCoolifyProxyUsed) await startTraefikProxy(id);
+     const { stdout: daemonJson } = await executeSSHCmd({ dockerId: id, command: `cat /etc/docker/daemon.json` });
+     try {
+         let daemonJsonParsed = JSON.parse(daemonJson);
+         let isUpdated = false;
+         if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
+             isUpdated = true;
+             daemonJsonParsed['live-restore'] = true
+         }
+         if (!daemonJsonParsed?.features?.buildkit) {
+             isUpdated = true;
+             daemonJsonParsed.features = {
+                 buildkit: true
+             }
+         }
+         if (isUpdated) {
+             await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
+             await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
+         }
+     } catch (error) {
+         const daemonJsonParsed = {
+             "live-restore": true,
+             "features": {
+                 "buildkit": true
+             }
+         }
+         await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
+         await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
+     } finally {
+         await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
+     }
+ }
+ export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
+     const { id } = request.params;
+     try {
+         await verifyRemoteDockerEngineFn(id);
+         return reply.code(201).send()
      } catch ({ status, message }) {
+         await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: false } })
          return errorHandler({ status, message })
      }
  }
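The new `verifyRemoteDockerEngineFn` also reconciles the remote `/etc/docker/daemon.json` over SSH: it forces `live-restore` and `features.buildkit`, writes the file back and restarts Docker only when something changed, and falls back to a default config when the file is missing or unparsable. A standalone sketch of just that merge step (helper name and return shape are my own, not part of the diff):

type DaemonJson = { 'live-restore'?: boolean; features?: { buildkit?: boolean } };

function reconcileDaemonJson(raw: string): { config: DaemonJson; isUpdated: boolean } {
    let config: DaemonJson;
    try {
        config = JSON.parse(raw);
    } catch {
        // unreadable or missing file -> start from the defaults the handler falls back to
        return { config: { 'live-restore': true, features: { buildkit: true } }, isUpdated: true };
    }
    let isUpdated = false;
    if (config['live-restore'] !== true) {
        config['live-restore'] = true;
        isUpdated = true;
    }
    if (!config.features?.buildkit) {
        config.features = { ...config.features, buildkit: true };
        isUpdated = true;
    }
    return { config, isUpdated };
}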
@@ -229,7 +262,7 @@ export async function getDestinationStatus(request: FastifyRequest<OnlyId>) {
  try {
      const { id } = request.params
      const destination = await prisma.destinationDocker.findUnique({ where: { id } })
-     const isRunning = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
+     const { found: isRunning } = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
      return {
          isRunning
      }


@@ -1,50 +1,64 @@
- import os from 'node:os';
- import osu from 'node-os-utils';
- import axios from 'axios';
- import { compareVersions } from 'compare-versions';
- import cuid from 'cuid';
- import bcrypt from 'bcryptjs';
- import { asyncExecShell, asyncSleep, cleanupDockerStorage, errorHandler, isDev, listSettings, prisma, uniqueName, version } from '../../../lib/common';
- import { supportedServiceTypesAndVersions } from '../../../lib/services/supportedVersions';
- import type { FastifyReply, FastifyRequest } from 'fastify';
- import type { Login, Update } from '.';
- import type { GetCurrentUser } from './types';
+ import axios from "axios";
+ import { compareVersions } from "compare-versions";
+ import cuid from "cuid";
+ import bcrypt from "bcryptjs";
+ import {
+     asyncExecShell,
+     asyncSleep,
+     cleanupDockerStorage,
+     errorHandler,
+     isDev,
+     listSettings,
+     prisma,
+     uniqueName,
+     version,
+ } from "../../../lib/common";
+ import { supportedServiceTypesAndVersions } from "../../../lib/services/supportedVersions";
+ import { scheduler } from "../../../lib/scheduler";
+ import type { FastifyReply, FastifyRequest } from "fastify";
+ import type { Login, Update } from ".";
+ import type { GetCurrentUser } from "./types";
  export async function hashPassword(password: string): Promise<string> {
      const saltRounds = 15;
      return bcrypt.hash(password, saltRounds);
  }
- export async function cleanupManually() {
+ export async function cleanupManually(request: FastifyRequest) {
      try {
-         const destination = await prisma.destinationDocker.findFirst({ where: { engine: '/var/run/docker.sock' } })
-         await cleanupDockerStorage(destination.id, true, true)
-         return {}
+         const { serverId } = request.body;
+         const destination = await prisma.destinationDocker.findUnique({
+             where: { id: serverId },
+         });
+         await cleanupDockerStorage(destination.id, true, true);
+         return {};
      } catch ({ status, message }) {
          return errorHandler({ status, message });
      }
  }
  export async function checkUpdate(request: FastifyRequest) {
      try {
-         const isStaging = request.hostname === 'staging.coolify.io'
+         const isStaging =
+             request.hostname === "staging.coolify.io" ||
+             request.hostname === "arm.coolify.io";
          const currentVersion = version;
          const { data: versions } = await axios.get(
              `https://get.coollabs.io/versions.json?appId=${process.env["COOLIFY_APP_ID"]}&version=${currentVersion}`
          );
          const latestVersion = versions["coolify"].main.version;
          const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
          if (isStaging) {
              return {
                  isUpdateAvailable: true,
                  latestVersion: "next",
              };
          }
          return {
              isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1,
              latestVersion,
          };
      } catch ({ status, message }) {
          return errorHandler({ status, message });
      }
  }
@@ -52,16 +66,14 @@ export async function update(request: FastifyRequest<Update>) {
      const { latestVersion } = request.body;
      try {
          if (!isDev) {
-             const { isAutoUpdateEnabled } = (await prisma.setting.findFirst()) || {
-                 isAutoUpdateEnabled: false
-             };
+             const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
              await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
              await asyncExecShell(`env | grep COOLIFY > .env`);
              await asyncExecShell(
                  `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
              );
              await asyncExecShell(
-                 `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
+                 `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
              );
              return {};
          } else {
@@ -69,13 +81,27 @@ export async function update(request: FastifyRequest<Update>) {
              return {};
          }
      } catch ({ status, message }) {
          return errorHandler({ status, message });
      }
  }
+ export async function resetQueue(request: FastifyRequest<any>) {
+     try {
+         const teamId = request.user.teamId;
+         if (teamId === "0") {
+             await prisma.build.updateMany({
+                 where: { status: { in: ["queued", "running"] } },
+                 data: { status: "canceled" },
+             });
+             scheduler.workers.get("deployApplication").postMessage("cancel");
+         }
+     } catch ({ status, message }) {
+         return errorHandler({ status, message });
+     }
+ }
  export async function restartCoolify(request: FastifyRequest<any>) {
      try {
          const teamId = request.user.teamId;
          if (teamId === "0") {
              if (!isDev) {
                  asyncExecShell(`docker restart coolify`);
                  return {};
@@ -83,135 +109,163 @@ export async function restartCoolify(request: FastifyRequest<any>) {
              return {};
          }
      }
-     throw { status: 500, message: 'You are not authorized to restart Coolify.' };
-     } catch ({ status, message }) {
-         return errorHandler({ status, message })
-     }
- }
- export async function showUsage() {
-     try {
-         return {
-             usage: {
-                 uptime: os.uptime(),
-                 memory: await osu.mem.info(),
-                 cpu: {
-                     load: os.loadavg(),
-                     usage: await osu.cpu.usage(),
-                     count: os.cpus().length
-                 },
-                 disk: await osu.drive.info('/')
-             }
-         };
+     throw {
+         status: 500,
+         message: "You are not authorized to restart Coolify.",
+     };
  } catch ({ status, message }) {
      return errorHandler({ status, message });
  }
  }
  export async function showDashboard(request: FastifyRequest) {
      try {
          const userId = request.user.userId;
          const teamId = request.user.teamId;
-         const applications = await prisma.application.findMany({
+         let applications = await prisma.application.findMany({
              where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
-             include: { settings: true }
+             include: { settings: true, destinationDocker: true, teams: true },
          });
          const databases = await prisma.database.findMany({
              where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
-             include: { settings: true }
+             include: { settings: true, destinationDocker: true, teams: true },
          });
          const services = await prisma.service.findMany({
              where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
+             include: { destinationDocker: true, teams: true },
+         });
+         const gitSources = await prisma.gitSource.findMany({
+             where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
+             include: { teams: true },
+         });
+         const destinations = await prisma.destinationDocker.findMany({
+             where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
+             include: { teams: true },
          });
          const settings = await listSettings();
+         let foundUnconfiguredApplication = false;
+         for (const application of applications) {
+             if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
+                 foundUnconfiguredApplication = true
+             }
+         }
+         let foundUnconfiguredService = false;
+         for (const service of services) {
+             if (!service.fqdn) {
+                 foundUnconfiguredService = true
+             }
+         }
+         let foundUnconfiguredDatabase = false;
+         for (const database of databases) {
+             if (!database.version) {
+                 foundUnconfiguredDatabase = true
+             }
+         }
          return {
+             foundUnconfiguredApplication,
+             foundUnconfiguredDatabase,
+             foundUnconfiguredService,
              applications,
              databases,
              services,
+             gitSources,
+             destinations,
              settings,
          };
      } catch ({ status, message }) {
          return errorHandler({ status, message });
      }
  }
export async function login(request: FastifyRequest<Login>, reply: FastifyReply) { export async function login(
request: FastifyRequest<Login>,
reply: FastifyReply
) {
if (request.user) { if (request.user) {
return reply.redirect('/dashboard'); return reply.redirect("/dashboard");
} else { } else {
const { email, password, isLogin } = request.body || {}; const { email, password, isLogin } = request.body || {};
if (!email || !password) { if (!email || !password) {
throw { status: 500, message: 'Email and password are required.' }; throw { status: 500, message: "Email and password are required." };
} }
const users = await prisma.user.count(); const users = await prisma.user.count();
const userFound = await prisma.user.findUnique({ const userFound = await prisma.user.findUnique({
where: { email }, where: { email },
include: { teams: true, permission: true }, include: { teams: true, permission: true },
rejectOnNotFound: false rejectOnNotFound: false,
}); });
if (!userFound && isLogin) { if (!userFound && isLogin) {
throw { status: 500, message: 'User not found.' }; throw { status: 500, message: "User not found." };
} }
const { isRegistrationEnabled, id } = await prisma.setting.findFirst() const { isRegistrationEnabled, id } = await prisma.setting.findFirst();
let uid = cuid(); let uid = cuid();
let permission = 'read'; let permission = "read";
let isAdmin = false; let isAdmin = false;
if (users === 0) { if (users === 0) {
await prisma.setting.update({ where: { id }, data: { isRegistrationEnabled: false } }); await prisma.setting.update({
uid = '0'; where: { id },
data: { isRegistrationEnabled: false },
});
uid = "0";
} }
if (userFound) { if (userFound) {
if (userFound.type === 'email') { if (userFound.type === "email") {
if (userFound.password === 'RESETME') { if (userFound.password === "RESETME") {
const hashedPassword = await hashPassword(password); const hashedPassword = await hashPassword(password);
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) { if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
if (userFound.id === '0') { if (userFound.id === "0") {
await prisma.user.update({ await prisma.user.update({
where: { email: userFound.email }, where: { email: userFound.email },
data: { password: 'RESETME' } data: { password: "RESETME" },
}); });
} else { } else {
await prisma.user.update({ await prisma.user.update({
where: { email: userFound.email }, where: { email: userFound.email },
data: { password: 'RESETTIMEOUT' } data: { password: "RESETTIMEOUT" },
}); });
} }
throw { throw {
status: 500, status: 500,
message: 'Password reset link has expired. Please request a new one.' message:
"Password reset link has expired. Please request a new one.",
}; };
} else { } else {
await prisma.user.update({ await prisma.user.update({
where: { email: userFound.email }, where: { email: userFound.email },
data: { password: hashedPassword } data: { password: hashedPassword },
}); });
return { return {
userId: userFound.id, userId: userFound.id,
teamId: userFound.id, teamId: userFound.id,
permission: userFound.permission, permission: userFound.permission,
isAdmin: true isAdmin: true,
}; };
} }
} }
const passwordMatch = await bcrypt.compare(password, userFound.password); const passwordMatch = await bcrypt.compare(
password,
userFound.password
);
if (!passwordMatch) { if (!passwordMatch) {
throw { throw {
status: 500, status: 500,
message: 'Wrong password or email address.' message: "Wrong password or email address.",
}; };
} }
uid = userFound.id; uid = userFound.id;
isAdmin = true; isAdmin = true;
} }
} else { } else {
permission = 'owner'; permission = "owner";
isAdmin = true; isAdmin = true;
if (!isRegistrationEnabled) { if (!isRegistrationEnabled) {
throw { throw {
status: 404, status: 404,
message: 'Registration disabled by administrator.' message: "Registration disabled by administrator.",
}; };
} }
const hashedPassword = await hashPassword(password); const hashedPassword = await hashPassword(password);
@@ -221,17 +275,17 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
id: uid, id: uid,
email, email,
password: hashedPassword, password: hashedPassword,
type: 'email', type: "email",
teams: { teams: {
create: { create: {
id: uid, id: uid,
name: uniqueName(), name: uniqueName(),
destinationDocker: { connect: { network: 'coolify' } } destinationDocker: { connect: { network: "coolify" } },
} },
}, },
permission: { create: { teamId: uid, permission: 'owner' } } permission: { create: { teamId: uid, permission: "owner" } },
}, },
include: { teams: true } include: { teams: true },
}); });
} else { } else {
await prisma.user.create({ await prisma.user.create({
@@ -239,16 +293,16 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
id: uid, id: uid,
email, email,
password: hashedPassword, password: hashedPassword,
type: 'email', type: "email",
teams: { teams: {
create: { create: {
id: uid, id: uid,
name: uniqueName() name: uniqueName(),
} },
}, },
permission: { create: { teamId: uid, permission: 'owner' } } permission: { create: { teamId: uid, permission: "owner" } },
}, },
include: { teams: true } include: { teams: true },
}); });
} }
} }
@@ -256,18 +310,21 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
userId: uid, userId: uid,
teamId: uid, teamId: uid,
permission, permission,
isAdmin isAdmin,
}; };
} }
} }
export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fastify) { export async function getCurrentUser(
let token = null request: FastifyRequest<GetCurrentUser>,
const { teamId } = request.query fastify
) {
let token = null;
const { teamId } = request.query;
try { try {
const user = await prisma.user.findUnique({ const user = await prisma.user.findUnique({
where: { id: request.user.userId } where: { id: request.user.userId },
}) });
if (!user) { if (!user) {
throw "User not found"; throw "User not found";
} }
@@ -278,28 +335,30 @@ export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fa
try { try {
const user = await prisma.user.findFirst({ const user = await prisma.user.findFirst({
where: { id: request.user.userId, teams: { some: { id: teamId } } }, where: { id: request.user.userId, teams: { some: { id: teamId } } },
include: { teams: true, permission: true } include: { teams: true, permission: true },
}) });
if (user) { if (user) {
const permission = user.permission.find(p => p.teamId === teamId).permission const permission = user.permission.find(
(p) => p.teamId === teamId
).permission;
const payload = { const payload = {
...request.user, ...request.user,
teamId, teamId,
permission: permission || null, permission: permission || null,
isAdmin: permission === 'owner' || permission === 'admin' isAdmin: permission === "owner" || permission === "admin",
};
} token = fastify.jwt.sign(payload);
token = fastify.jwt.sign(payload)
} }
} catch (error) { } catch (error) {
// No new token -> not switching teams // No new token -> not switching teams
} }
} }
+     const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } })
      return {
          settings: await prisma.setting.findFirst(),
+         pendingInvitations,
          supportedServiceTypesAndVersions,
          token,
          ...request.user,
      };
  }


@@ -5,9 +5,10 @@ import { decrypt, errorHandler, prisma, uniqueName } from '../../../../lib/commo
  import { day } from '../../../../lib/dayjs';
  import type { OnlyId } from '../../../../types';
- import type { BodyId, InviteToTeam, SaveTeam, SetPermission } from './types';
+ import type { BodyId, DeleteUserFromTeam, InviteToTeam, SaveTeam, SetPermission } from './types';
- export async function listTeams(request: FastifyRequest) {
+ export async function listAccounts(request: FastifyRequest) {
      try {
          const userId = request.user.userId;
          const teamId = request.user.teamId;
@@ -15,10 +16,24 @@ export async function listTeams(request: FastifyRequest) {
              where: { id: userId },
              select: { id: true, email: true, teams: true }
          });
-         let accounts = [];
-         let allTeams = [];
+         let accounts = await prisma.user.findMany({ where: { teams: { some: { id: teamId } } }, select: { id: true, email: true, teams: true } });
          if (teamId === '0') {
              accounts = await prisma.user.findMany({ select: { id: true, email: true, teams: true } });
+         }
+         return {
+             account,
+             accounts
+         };
+     } catch ({ status, message }) {
+         return errorHandler({ status, message })
+     }
+ }
+ export async function listTeams(request: FastifyRequest) {
+     try {
+         const userId = request.user.userId;
+         const teamId = request.user.teamId;
+         let allTeams = [];
+         if (teamId === '0') {
              allTeams = await prisma.team.findMany({
                  where: { users: { none: { id: userId } } },
                  include: { permissions: true }
@@ -28,18 +43,30 @@ export async function listTeams(request: FastifyRequest) {
              where: { users: { some: { id: userId } } },
              include: { permissions: true }
          });
+         const invitations = await prisma.teamInvitation.findMany({ where: { uid: userId } });
          return {
              ownTeams,
              allTeams,
+             invitations,
-             account,
-             accounts
          };
      } catch ({ status, message }) {
          return errorHandler({ status, message })
      }
  }
export async function removeUserFromTeam(request: FastifyRequest<DeleteUserFromTeam>, reply: FastifyReply) {
try {
const { uid } = request.body;
const { id } = request.params;
const userId = request.user.userId;
const foundUser = await prisma.team.findMany({ where: { id, users: { some: { id: userId } } } });
if (foundUser.length === 0) {
return errorHandler({ status: 404, message: 'Team not found' });
}
await prisma.team.update({ where: { id }, data: { users: { disconnect: { id: uid } } } });
await prisma.permission.deleteMany({ where: { teamId: id, userId: uid } })
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
  export async function deleteTeam(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
      try {
          const userId = request.user.userId;


@@ -1,19 +1,22 @@
  import { FastifyPluginAsync } from 'fastify';
- import { acceptInvitation, changePassword, deleteTeam, getTeam, inviteToTeam, listTeams, newTeam, removeUser, revokeInvitation, saveTeam, setPermission } from './handlers';
+ import { acceptInvitation, changePassword, deleteTeam, getTeam, inviteToTeam, listAccounts, listTeams, newTeam, removeUser, removeUserFromTeam, revokeInvitation, saveTeam, setPermission } from './handlers';
  import type { OnlyId } from '../../../../types';
- import type { BodyId, InviteToTeam, SaveTeam, SetPermission } from './types';
+ import type { BodyId, DeleteUserFromTeam, InviteToTeam, SaveTeam, SetPermission } from './types';
  const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.addHook('onRequest', async (request) => {
          return await request.jwtVerify()
      })
-     fastify.get('/', async (request) => await listTeams(request));
+     fastify.get('/', async (request) => await listAccounts(request));
      fastify.post('/new', async (request, reply) => await newTeam(request, reply));
+     fastify.get('/teams', async (request) => await listTeams(request));
      fastify.get<OnlyId>('/team/:id', async (request, reply) => await getTeam(request, reply));
      fastify.post<SaveTeam>('/team/:id', async (request, reply) => await saveTeam(request, reply));
      fastify.delete<OnlyId>('/team/:id', async (request, reply) => await deleteTeam(request, reply));
+     fastify.post<DeleteUserFromTeam>('/team/:id/user/remove', async (request, reply) => await removeUserFromTeam(request, reply));
      fastify.post<InviteToTeam>('/team/:id/invitation/invite', async (request, reply) => await inviteToTeam(request, reply))
      fastify.post<BodyId>('/team/:id/invitation/accept', async (request) => await acceptInvitation(request));
@@ -23,7 +26,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.delete<BodyId>('/user/remove', async (request, reply) => await removeUser(request, reply));
      fastify.post<BodyId>('/user/password', async (request, reply) => await changePassword(request, reply));
-     // fastify.delete('/user', async (request, reply) => await deleteUser(request, reply));
  };


@@ -5,6 +5,14 @@ export interface SaveTeam extends OnlyId {
          name: string
      }
  }
export interface DeleteUserFromTeam {
Body: {
uid: string
},
Params: {
id: string
}
}
  export interface InviteToTeam {
      Body: {
          email: string,


@@ -1,6 +1,9 @@
  import { FastifyPluginAsync } from 'fastify';
- import { checkUpdate, login, showDashboard, update, showUsage, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
+ import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
  import { GetCurrentUser } from './types';
+ import pump from 'pump'
+ import fs from 'fs'
+ import { asyncExecShell, encrypt, errorHandler, prisma } from '../../../lib/common';
  export interface Update {
      Body: { latestVersion: string }
@@ -23,9 +26,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      onRequest: [fastify.authenticate]
  }, async (request) => await getCurrentUser(request, fastify));
- fastify.get('/undead', {
-     onRequest: [fastify.authenticate]
- }, async function () {
+ fastify.get('/undead', async function () {
      return { message: 'nope' };
  });
@@ -43,17 +44,17 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      onRequest: [fastify.authenticate]
  }, async (request) => await showDashboard(request));
- fastify.get('/usage', {
-     onRequest: [fastify.authenticate]
- }, async () => await showUsage());
  fastify.post('/internal/restart', {
      onRequest: [fastify.authenticate]
  }, async (request) => await restartCoolify(request));
+ fastify.post('/internal/resetQueue', {
+     onRequest: [fastify.authenticate]
+ }, async (request) => await resetQueue(request));
  fastify.post('/internal/cleanup', {
      onRequest: [fastify.authenticate]
- }, async () => await cleanupManually());
+ }, async (request) => await cleanupManually(request));
  };
  export default root;
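A hedged usage sketch for the internal routes touched above; the paths are relative to wherever this plugin is mounted, and the token and serverId values are placeholders:

const token = '<jwt from /login>';
const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' };

// cancels queued/running builds (only honoured for team '0')
await fetch('/internal/resetQueue', { method: 'POST', headers });

// cleanupManually now expects the destination to clean in the request body
await fetch('/internal/cleanup', {
    method: 'POST',
    headers,
    body: JSON.stringify({ serverId: '<destinationDocker id>' })
});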


@@ -0,0 +1,125 @@
import type { FastifyRequest } from 'fastify';
import { errorHandler, executeDockerCmd, prisma, createRemoteEngineConfiguration, executeSSHCmd } from '../../../../lib/common';
import os from 'node:os';
import osu from 'node-os-utils';
export async function listServers(request: FastifyRequest) {
try {
const userId = request.user.userId;
const teamId = request.user.teamId;
let servers = await prisma.destinationDocker.findMany({ where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }, distinct: ['remoteIpAddress', 'engine'] })
servers = servers.filter((server) => {
if (server.remoteEngine) {
if (server.remoteVerified) {
return server
}
} else {
return server
}
})
return {
servers
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
const mappingTable = [
['K total memory', 'totalMemoryKB'],
['K used memory', 'usedMemoryKB'],
['K active memory', 'activeMemoryKB'],
['K inactive memory', 'inactiveMemoryKB'],
['K free memory', 'freeMemoryKB'],
['K buffer memory', 'bufferMemoryKB'],
['K swap cache', 'swapCacheKB'],
['K total swap', 'totalSwapKB'],
['K used swap', 'usedSwapKB'],
['K free swap', 'freeSwapKB'],
['non-nice user cpu ticks', 'nonNiceUserCpuTicks'],
['nice user cpu ticks', 'niceUserCpuTicks'],
['system cpu ticks', 'systemCpuTicks'],
['idle cpu ticks', 'idleCpuTicks'],
['IO-wait cpu ticks', 'ioWaitCpuTicks'],
['IRQ cpu ticks', 'irqCpuTicks'],
['softirq cpu ticks', 'softIrqCpuTicks'],
['stolen cpu ticks', 'stolenCpuTicks'],
['pages paged in', 'pagesPagedIn'],
['pages paged out', 'pagesPagedOut'],
['pages swapped in', 'pagesSwappedIn'],
['pages swapped out', 'pagesSwappedOut'],
['interrupts', 'interrupts'],
['CPU context switches', 'cpuContextSwitches'],
['boot time', 'bootTime'],
['forks', 'forks']
];
function parseFromText(text) {
var data = {};
var lines = text.split(/\r?\n/);
for (const line of lines) {
for (const [key, value] of mappingTable) {
if (line.indexOf(key) >= 0) {
const values = line.match(/[0-9]+/)[0];
data[value] = parseInt(values, 10);
}
}
}
return data;
}
export async function showUsage(request: FastifyRequest) {
const { id } = request.params;
let { remoteEngine } = request.query
remoteEngine = remoteEngine === 'true' ? true : false
if (remoteEngine) {
const { stdout: stats } = await executeSSHCmd({ dockerId: id, command: `vmstat -s` })
const { stdout: disks } = await executeSSHCmd({ dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
const { stdout: cpus } = await executeSSHCmd({ dockerId: id, command: `nproc --all` })
const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
const parsed: any = parseFromText(stats)
return {
usage: {
uptime: parsed.bootTime / 1024,
memory: {
totalMemMb: parsed.totalMemoryKB / 1024,
usedMemMb: parsed.usedMemoryKB / 1024,
freeMemMb: parsed.freeMemoryKB / 1024,
usedMemPercentage: (parsed.usedMemoryKB / parsed.totalMemoryKB) * 100,
freeMemPercentage: (parsed.totalMemoryKB - parsed.usedMemoryKB) / parsed.totalMemoryKB * 100
},
cpu: {
load: [0, 0, 0],
usage: cpuUsage,
count: cpus
},
disk: {
totalGb: (disks.split(' ')[0] / 1024).toFixed(1),
usedGb: (disks.split(' ')[1] / 1024).toFixed(1),
freeGb: (disks.split(' ')[0] - disks.split(' ')[1]).toFixed(1),
usedPercentage: disks.split(' ')[2].replace('%', ''),
freePercentage: 100 - disks.split(' ')[2].replace('%', '')
}
}
}
} else {
try {
return {
usage: {
uptime: os.uptime(),
memory: await osu.mem.info(),
cpu: {
load: os.loadavg(),
usage: await osu.cpu.usage(),
count: os.cpus().length
},
disk: await osu.drive.info('/')
}
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
}
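For illustration, what `parseFromText` extracts from a few `vmstat -s` lines (numbers invented):

const sample = [
    ' 8152656 K total memory',
    ' 3120032 K used memory',
    ' 1696364 K free memory',
    '1665060128 boot time'
].join('\n');
const parsed = parseFromText(sample);
// -> { totalMemoryKB: 8152656, usedMemoryKB: 3120032, freeMemoryKB: 1696364, bootTime: 1665060128 }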


@@ -0,0 +1,14 @@
import { FastifyPluginAsync } from 'fastify';
import { listServers, showUsage } from './handlers';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => {
return await request.jwtVerify()
})
fastify.get('/', async (request) => await listServers(request));
fastify.get('/usage/:id', async (request) => await showUsage(request));
};
export default root;


@@ -0,0 +1,27 @@
import { OnlyId } from "../../../../types"
export interface SaveTeam extends OnlyId {
Body: {
name: string
}
}
export interface InviteToTeam {
Body: {
email: string,
permission: string,
teamId: string,
teamName: string
}
}
export interface BodyId {
Body: {
id: string
}
}
export interface SetPermission {
Body: {
userId: string,
newPermission: string,
permissionId: string
}
}


@@ -1,7 +1,7 @@
  import type { FastifyReply, FastifyRequest } from 'fastify';
  import fs from 'fs/promises';
  import yaml from 'js-yaml';
- import { prisma, uniqueName, asyncExecShell, getServiceFromDB, getContainerUsage, isDomainConfigured, saveUpdateableFields, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, checkExposedPort } from '../../../../lib/common';
+ import { prisma, uniqueName, asyncExecShell, getServiceFromDB, getContainerUsage, isDomainConfigured, saveUpdateableFields, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, checkExposedPort, listSettings } from '../../../../lib/common';
  import { day } from '../../../../lib/dayjs';
  import { checkContainer, isContainerExited } from '../../../../lib/docker';
  import cuid from 'cuid';
@@ -36,6 +36,33 @@ export async function newService(request: FastifyRequest, reply: FastifyReply) {
      return errorHandler({ status, message })
  }
  }
export async function cleanupUnconfiguredServices(request: FastifyRequest) {
try {
const teamId = request.user.teamId;
let services = await prisma.service.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
include: { destinationDocker: true, teams: true },
});
for (const service of services) {
if (!service.fqdn) {
if (service.destinationDockerId) {
await executeDockerCmd({
dockerId: service.destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
})
await executeDockerCmd({
dockerId: service.destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
})
}
await removeService({ id: service.id });
}
}
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
  export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
      try {
          const teamId = request.user.teamId;
@@ -43,13 +70,17 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
      let isRunning = false;
      let isExited = false
+     let isRestarting = false;
      const service = await getServiceFromDB({ id, teamId });
      const { destinationDockerId, settings } = service;
      if (destinationDockerId) {
-         isRunning = await checkContainer({ dockerId: service.destinationDocker.id, container: id });
-         isExited = await isContainerExited(service.destinationDocker.id, id);
+         const status = await checkContainer({ dockerId: service.destinationDocker.id, container: id });
+         if (status?.found) {
+             isRunning = status.status.isRunning;
+             isExited = status.status.isExited;
+             isRestarting = status.status.isRestarting
+         }
      }
      return {
          isRunning,
@@ -70,6 +101,7 @@ export async function getService(request: FastifyRequest<OnlyId>) {
          throw { status: 404, message: 'Service not found.' }
      }
      return {
+         settings: await listSettings(),
          service
      }
  } catch ({ status, message }) {
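The handlers in this file now destructure a richer result from `checkContainer` (`{ found }` in some call sites, `status.isRunning` / `isExited` / `isRestarting` in `getServiceStatus`). Its return type is not shown in this diff; judging only from those call sites it is roughly:

type ContainerCheckResult = {
    found: boolean;
    status?: {
        isRunning: boolean;
        isExited: boolean;
        isRestarting: boolean;
    };
};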
@@ -232,7 +264,7 @@ export async function checkService(request: FastifyRequest<CheckService>) {
      if (otherFqdns && otherFqdns.length > 0) otherFqdns = otherFqdns.map((f) => f.toLowerCase());
      if (exposePort) exposePort = Number(exposePort);
-     const { destinationDocker: { id: dockerId, remoteIpAddress, remoteEngine }, exposePort: configuredPort } = await prisma.service.findUnique({ where: { id }, include: { destinationDocker: true } })
+     const { destinationDocker: { remoteIpAddress, remoteEngine, engine }, exposePort: configuredPort } = await prisma.service.findUnique({ where: { id }, include: { destinationDocker: true } })
      const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
      let found = await isDomainConfigured({ id, fqdn, remoteIpAddress });
@@ -247,7 +279,7 @@ export async function checkService(request: FastifyRequest<CheckService>) {
              }
          }
      }
-     if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, dockerId, remoteIpAddress })
+     if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress })
      if (isDNSCheckEnabled && !isDev && !forceSave) {
          let hostname = request.hostname.split(':')[0];
          if (remoteEngine) hostname = remoteIpAddress;
@@ -451,7 +483,7 @@ export async function activatePlausibleUsers(request: FastifyRequest<OnlyId>, re
      if (destinationDockerId) {
          await executeDockerCmd({
              dockerId: destinationDocker.id,
-             command: `docker exec ${id} 'psql -H postgresql://${postgresqlUser}:${postgresqlPassword}@localhost:5432/${postgresqlDatabase} -c "UPDATE users SET email_verified = true;"'`
+             command: `docker exec ${id}-postgresql psql -H postgresql://${postgresqlUser}:${postgresqlPassword}@localhost:5432/${postgresqlDatabase} -c "UPDATE users SET email_verified = true;"`
          })
          return await reply.code(201).send()
      }
@@ -471,7 +503,7 @@ export async function cleanupPlausibleLogs(request: FastifyRequest<OnlyId>, repl
      if (destinationDockerId) {
          await executeDockerCmd({
              dockerId: destinationDocker.id,
-             command: `docker exec ${id}-clickhouse sh -c "/usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\""`
+             command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`
          })
          return await reply.code(201).send()
      }
@@ -484,9 +516,9 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
      const { id } = request.params
      const { ftpEnabled } = request.body;
-     const { service: { destinationDocker: { id: dockerId } } } = await prisma.wordpress.findUnique({ where: { serviceId: id }, include: { service: { include: { destinationDocker: true } } } })
-     const publicPort = await getFreePublicPort(id, dockerId);
+     const { service: { destinationDocker: { engine, remoteEngine, remoteIpAddress } } } = await prisma.wordpress.findUnique({ where: { serviceId: id }, include: { service: { include: { destinationDocker: true } } } })
+     const publicPort = await getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress });
      let ftpUser = cuid();
      let ftpPassword = generatePassword({});
@@ -553,7 +585,7 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
      });
      try {
-         const isRunning = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
+         const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
          if (isRunning) {
              await executeDockerCmd({
                  dockerId: destinationDocker.id,
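Both call sites above switch `getFreePublicPort` from positional arguments to a single object. Its implementation stays in lib/common and is not part of this diff; the assumed signature, inferred only from the callers:

declare function getFreePublicPort(args: {
    id: string;
    engine: string | null;
    remoteEngine: boolean;
    remoteIpAddress: string | null;
}): Promise<number>;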


@@ -5,6 +5,7 @@ import {
      checkService,
      checkServiceDomain,
      cleanupPlausibleLogs,
+     cleanupUnconfiguredServices,
      deleteService,
      deleteServiceSecret,
      deleteServiceStorage,
@@ -30,7 +31,7 @@ import {
  import type { OnlyId } from '../../../../types';
  import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetGlitchTipSettings, SetWordpressSettings } from './types';
- import { startService, stopService } from '../../../../lib/services/handlers';
+ import { migrateAppwriteDB, startService, stopService } from '../../../../lib/services/handlers';
  const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.addHook('onRequest', async (request) => {
@@ -39,6 +40,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.get('/', async (request) => await listServices(request));
      fastify.post('/new', async (request, reply) => await newService(request, reply));
+     fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredServices(request));
      fastify.get<OnlyId>('/:id', async (request) => await getService(request));
      fastify.post<SaveService>('/:id', async (request, reply) => await saveService(request, reply));
      fastify.delete<OnlyId>('/:id', async (request) => await deleteService(request));
@@ -76,6 +79,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
      fastify.post<OnlyId>('/:id/plausibleanalytics/activate', async (request, reply) => await activatePlausibleUsers(request, reply));
      fastify.post<OnlyId>('/:id/plausibleanalytics/cleanup', async (request, reply) => await cleanupPlausibleLogs(request, reply));
      fastify.post<ActivateWordpressFtp>('/:id/wordpress/ftp', async (request, reply) => await activateWordpressFtp(request, reply));
+     fastify.post<OnlyId>('/:id/appwrite/migrate', async (request, reply) => await migrateAppwriteDB(request, reply));
  };
  export default root;


@@ -1,8 +1,9 @@
  import { promises as dns } from 'dns';
+ import { X509Certificate } from 'node:crypto';
  import type { FastifyReply, FastifyRequest } from 'fastify';
- import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, getDomain, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common';
+ import { asyncExecShell, checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common';
- import { CheckDNS, CheckDomain, DeleteDomain, DeleteSSHKey, SaveSettings, SaveSSHKey } from './types';
+ import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
  export async function listAllSettings(request: FastifyRequest) {
@@ -16,8 +17,16 @@ export async function listAllSettings(request: FastifyRequest) {
          unencryptedKeys.push({ id: key.id, name: key.name, privateKey: decrypt(key.privateKey), createdAt: key.createdAt })
      }
  }
const certificates = await prisma.certificate.findMany({ where: { team: { id: teamId } } })
let cns = [];
for (const certificate of certificates) {
const x509 = new X509Certificate(certificate.cert);
cns.push({ commonName: x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', ''), id: certificate.id, createdAt: certificate.createdAt })
}
  return {
      settings,
+     certificates: cns,
      sshKeys: unencryptedKeys
  }
  } catch ({ status, message }) {
@@ -58,7 +67,7 @@ export async function deleteDomain(request: FastifyRequest<DeleteDomain>, reply:
      const { fqdn } = request.body
      const { DNSServers } = await listSettings();
      if (DNSServers) {
-         dns.setServers([DNSServers]);
+         dns.setServers([...DNSServers.split(',')]);
      }
      let ip;
      try {
@@ -118,7 +127,7 @@ export async function saveSSHKey(request: FastifyRequest<SaveSSHKey>, reply: Fas
      return errorHandler({ status, message })
  }
  }
- export async function deleteSSHKey(request: FastifyRequest<DeleteSSHKey>, reply: FastifyReply) {
+ export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
      try {
          const { id } = request.body;
          await prisma.sshKey.delete({ where: { id } })
@@ -126,4 +135,15 @@ export async function deleteSSHKey(request: FastifyRequest<DeleteSSHKey>, reply:
  } catch ({ status, message }) {
      return errorHandler({ status, message })
  }
}
export async function deleteCertificates(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
try {
const { id } = request.body;
await asyncExecShell(`docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`)
await prisma.certificate.delete({ where: { id } })
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
} }
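For reference, the common-name lookup used by listAllSettings above and the certificate upload route below reduces to a small helper. A minimal sketch, assuming any PEM-encoded certificate string; commonNameOf is an illustrative name, not part of the diff:

import { X509Certificate } from 'node:crypto';

// `subject` is a newline-separated list of RDNs, e.g. "C=US\nCN=example.com",
// so the common name is the line that starts with "CN=".
function commonNameOf(pem: string): string | undefined {
  const x509 = new X509Certificate(pem);
  return x509.subject
    .split('\n')
    .find((line) => line.startsWith('CN='))
    ?.replace('CN=', '');
}

The handlers in the diff assume a CN is always present; a certificate carrying only subjectAltName entries would make find() return undefined, which the optional chaining above guards against.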

View File

@@ -1,21 +1,59 @@
import { FastifyPluginAsync } from 'fastify'; import { FastifyPluginAsync } from 'fastify';
import { checkDNS, checkDomain, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers'; import { X509Certificate } from 'node:crypto';
import { CheckDNS, CheckDomain, DeleteDomain, DeleteSSHKey, SaveSettings, SaveSSHKey } from './types';
import { encrypt, errorHandler, prisma } from '../../../../lib/common';
import { checkDNS, checkDomain, deleteCertificates, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers';
import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => { const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => { fastify.addHook('onRequest', async (request) => {
return await request.jwtVerify() return await request.jwtVerify()
}) })
fastify.get('/', async (request) => await listAllSettings(request)); fastify.get('/', async (request) => await listAllSettings(request));
fastify.post<SaveSettings>('/', async (request, reply) => await saveSettings(request, reply)); fastify.post<SaveSettings>('/', async (request, reply) => await saveSettings(request, reply));
fastify.delete<DeleteDomain>('/', async (request, reply) => await deleteDomain(request, reply)); fastify.delete<DeleteDomain>('/', async (request, reply) => await deleteDomain(request, reply));
fastify.get<CheckDNS>('/check', async (request) => await checkDNS(request)); fastify.get<CheckDNS>('/check', async (request) => await checkDNS(request));
fastify.post<CheckDomain>('/check', async (request) => await checkDomain(request)); fastify.post<CheckDomain>('/check', async (request) => await checkDomain(request));
fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply)); fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply));
fastify.delete<DeleteSSHKey>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply)); fastify.delete<OnlyIdInBody>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply));
fastify.post('/upload', async (request) => {
try {
const teamId = request.user.teamId;
const certificates = await prisma.certificate.findMany({})
let cns = [];
for (const certificate of certificates) {
const x509 = new X509Certificate(certificate.cert);
cns.push(x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', ''))
}
const parts = await request.files()
let key = null
let cert = null
for await (const part of parts) {
const name = part.fieldname
if (name === 'key') key = (await part.toBuffer()).toString()
if (name === 'cert') cert = (await part.toBuffer()).toString()
}
const x509 = new X509Certificate(cert);
const cn = x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', '')
if (cns.includes(cn)) {
throw {
message: `A certificate with ${cn} common name already exists.`
}
}
await prisma.certificate.create({ data: { cert, key: encrypt(key), team: { connect: { id: teamId } } } })
await prisma.applicationSettings.updateMany({ where: { application: { AND: [{ fqdn: { endsWith: cn } }, { fqdn: { startsWith: 'https' } }] } }, data: { isCustomSSL: true } })
return { message: 'Certificate uploaded' }
} catch ({ status, message }) {
return errorHandler({ status, message });
}
});
fastify.delete<OnlyIdInBody>('/certificate', async (request, reply) => await deleteCertificates(request, reply))
// fastify.get('/certificates', async (request) => await getCertificates(request))
}; };
export default root; export default root;
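A worked example of the isCustomSSL flip performed by the updateMany() call in the upload route: only applications whose fqdn ends with the certificate's CN and is served over https get marked (the FQDNs below are hypothetical):

// Same AND filter as the Prisma query: endsWith(cn) && startsWith('https').
const cn = 'app.example.com';
const fqdns = ['https://app.example.com', 'http://app.example.com', 'https://other.example.com'];
const flipped = fqdns.filter((fqdn) => fqdn.endsWith(cn) && fqdn.startsWith('https'));
// -> ['https://app.example.com']

Note that endsWith also matches longer hostnames that merely share the suffix (e.g. webapp.example.com), so the filter is slightly broader than an exact host match.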

View File

@@ -41,4 +41,9 @@ export interface DeleteSSHKey {
Body: { Body: {
id: string id: string
} }
}
export interface OnlyIdInBody {
Body: {
id: string
}
} }

View File

@@ -9,7 +9,7 @@ export async function listSources(request: FastifyRequest) {
try { try {
const teamId = request.user?.teamId; const teamId = request.user?.teamId;
const sources = await prisma.gitSource.findMany({ const sources = await prisma.gitSource.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }, where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
include: { teams: true, githubApp: true, gitlabApp: true } include: { teams: true, githubApp: true, gitlabApp: true }
}); });
return { return {
@@ -22,11 +22,11 @@ export async function listSources(request: FastifyRequest) {
export async function saveSource(request, reply) { export async function saveSource(request, reply) {
try { try {
const { id } = request.params const { id } = request.params
let { name, htmlUrl, apiUrl, customPort } = request.body let { name, htmlUrl, apiUrl, customPort, isSystemWide } = request.body
if (customPort) customPort = Number(customPort) if (customPort) customPort = Number(customPort)
await prisma.gitSource.update({ await prisma.gitSource.update({
where: { id }, where: { id },
data: { name, htmlUrl, apiUrl, customPort } data: { name, htmlUrl, apiUrl, customPort, isSystemWide }
}); });
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -56,7 +56,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
} }
const source = await prisma.gitSource.findFirst({ const source = await prisma.gitSource.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } }, where: { id, OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
include: { githubApp: true, gitlabApp: true } include: { githubApp: true, gitlabApp: true }
}); });
if (!source) { if (!source) {
@@ -104,7 +104,7 @@ export async function saveGitHubSource(request: FastifyRequest<SaveGitHubSource>
const { teamId } = request.user const { teamId } = request.user
const { id } = request.params const { id } = request.params
let { name, htmlUrl, apiUrl, organization, customPort } = request.body let { name, htmlUrl, apiUrl, organization, customPort, isSystemWide } = request.body
if (customPort) customPort = Number(customPort) if (customPort) customPort = Number(customPort)
if (id === 'new') { if (id === 'new') {
@@ -117,6 +117,7 @@ export async function saveGitHubSource(request: FastifyRequest<SaveGitHubSource>
apiUrl, apiUrl,
organization, organization,
customPort, customPort,
isSystemWide,
type: 'github', type: 'github',
teams: { connect: { id: teamId } } teams: { connect: { id: teamId } }
} }

View File

@@ -7,6 +7,7 @@ export interface SaveGitHubSource extends OnlyId {
apiUrl: string, apiUrl: string,
organization: string, organization: string,
customPort: number, customPort: number,
isSystemWide: boolean
} }
} }
export interface SaveGitLabSource extends OnlyId { export interface SaveGitLabSource extends OnlyId {

View File

@@ -1,7 +1,7 @@
import axios from "axios"; import axios from "axios";
import cuid from "cuid"; import cuid from "cuid";
import crypto from "crypto"; import crypto from "crypto";
import { encrypt, errorHandler, getUIUrl, isDev, prisma } from "../../../lib/common"; import { encrypt, errorHandler, getDomain, getUIUrl, isDev, prisma } from "../../../lib/common";
import { checkContainer, removeContainer } from "../../../lib/docker"; import { checkContainer, removeContainer } from "../../../lib/docker";
import { createdBranchDatabase, getApplicationFromDBWebhook, removeBranchDatabase } from "../../api/v1/applications/handlers"; import { createdBranchDatabase, getApplicationFromDBWebhook, removeBranchDatabase } from "../../api/v1/applications/handlers";
@@ -66,13 +66,19 @@ export async function configureGitHubApp(request, reply) {
} }
export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promise<any> { export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promise<any> {
try { try {
const allowedGithubEvents = ['push', 'pull_request']; const allowedGithubEvents = ['push', 'pull_request', 'ping', 'installation'];
const allowedActions = ['opened', 'reopened', 'synchronize', 'closed']; const allowedActions = ['opened', 'reopened', 'synchronize', 'closed'];
const githubEvent = request.headers['x-github-event']?.toString().toLowerCase(); const githubEvent = request.headers['x-github-event']?.toString().toLowerCase();
const githubSignature = request.headers['x-hub-signature-256']?.toString().toLowerCase(); const githubSignature = request.headers['x-hub-signature-256']?.toString().toLowerCase();
if (!allowedGithubEvents.includes(githubEvent)) { if (!allowedGithubEvents.includes(githubEvent)) {
throw { status: 500, message: 'Event not allowed.' } throw { status: 500, message: 'Event not allowed.' }
} }
if (githubEvent === 'ping') {
return { pong: 'cool' }
}
if (githubEvent === 'installation') {
return { status: 'cool' }
}
let projectId, branch; let projectId, branch;
const body = request.body const body = request.body
if (githubEvent === 'push') { if (githubEvent === 'push') {
@@ -80,7 +86,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
branch = body.ref.includes('/') ? body.ref.split('/')[2] : body.ref; branch = body.ref.includes('/') ? body.ref.split('/')[2] : body.ref;
} else if (githubEvent === 'pull_request') { } else if (githubEvent === 'pull_request') {
projectId = body.pull_request.base.repo.id; projectId = body.pull_request.base.repo.id;
branch = body.pull_request.base.ref.includes('/') ? body.pull_request.base.ref.split('/')[2] : body.pull_request.base.ref; branch = body.pull_request.base.ref
} }
if (!projectId || !branch) { if (!projectId || !branch) {
throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!' } throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!' }
@@ -147,14 +153,15 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
} else if (githubEvent === 'pull_request') { } else if (githubEvent === 'pull_request') {
const pullmergeRequestId = body.number.toString(); const pullmergeRequestId = body.number.toString();
const pullmergeRequestAction = body.action; const pullmergeRequestAction = body.action;
const sourceBranch = body.pull_request.head.ref.includes('/') ? body.pull_request.head.ref.split('/')[2] : body.pull_request.head.ref; const sourceBranch = body.pull_request.head.ref
const sourceRepository = body.pull_request.head.repo.full_name
if (!allowedActions.includes(pullmergeRequestAction)) { if (!allowedActions.includes(pullmergeRequestAction)) {
throw { status: 500, message: 'Action not allowed.' } throw { status: 500, message: 'Action not allowed.' }
} }
if (application.settings.previews) { if (application.settings.previews) {
if (application.destinationDockerId) { if (application.destinationDockerId) {
const isRunning = await checkContainer( const { found: isRunning } = await checkContainer(
{ {
dockerId: application.destinationDocker.id, dockerId: application.destinationDocker.id,
container: application.id container: application.id
@@ -169,24 +176,45 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
pullmergeRequestAction === 'reopened' || pullmergeRequestAction === 'reopened' ||
pullmergeRequestAction === 'synchronize' pullmergeRequestAction === 'synchronize'
) { ) {
await prisma.application.update({ await prisma.application.update({
where: { id: application.id }, where: { id: application.id },
data: { updatedAt: new Date() } data: { updatedAt: new Date() }
}); });
if (application.connectedDatabase && pullmergeRequestAction === 'opened' || pullmergeRequestAction === 'reopened') { let previewApplicationId = undefined
// Coolify hosted database if (pullmergeRequestId) {
if (application.connectedDatabase.databaseId) { const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
const databaseId = application.connectedDatabase.databaseId; if (foundPreviewApplications.length > 0) {
const database = await prisma.database.findUnique({ where: { id: databaseId } }); previewApplicationId = foundPreviewApplications[0].id
if (database) { } else {
await createdBranchDatabase(database, application.connectedDatabase.hostedDatabaseDBName, pullmergeRequestId); const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://'
} const previewApplication = await prisma.previewApplication.create({
data: {
pullmergeRequestId,
sourceBranch,
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
application: { connect: { id: application.id } }
}
})
previewApplicationId = previewApplication.id
} }
} }
// if (application.connectedDatabase && pullmergeRequestAction === 'opened' || pullmergeRequestAction === 'reopened') {
// // Coolify hosted database
// if (application.connectedDatabase.databaseId) {
// const databaseId = application.connectedDatabase.databaseId;
// const database = await prisma.database.findUnique({ where: { id: databaseId } });
// if (database) {
// await createdBranchDatabase(database, application.connectedDatabase.hostedDatabaseDBName, pullmergeRequestId);
// }
// }
// }
await prisma.build.create({ await prisma.build.create({
data: { data: {
id: buildId, id: buildId,
sourceRepository,
pullmergeRequestId, pullmergeRequestId,
previewApplicationId,
sourceBranch, sourceBranch,
applicationId: application.id, applicationId: application.id,
destinationDockerId: application.destinationDocker.id, destinationDockerId: application.destinationDocker.id,
@@ -198,7 +226,9 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
} }
}); });
return {
message: 'Queued. Thank you!'
};
} else if (pullmergeRequestAction === 'closed') { } else if (pullmergeRequestAction === 'closed') {
if (application.destinationDockerId) { if (application.destinationDockerId) {
const id = `${application.id}-${pullmergeRequestId}`; const id = `${application.id}-${pullmergeRequestId}`;
@@ -206,13 +236,22 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
await removeContainer({ id, dockerId: application.destinationDocker.id }); await removeContainer({ id, dockerId: application.destinationDocker.id });
} catch (error) { } } catch (error) { }
} }
if (application.connectedDatabase.databaseId) { const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
const databaseId = application.connectedDatabase.databaseId; if (foundPreviewApplications.length > 0) {
const database = await prisma.database.findUnique({ where: { id: databaseId } }); for (const preview of foundPreviewApplications) {
if (database) { await prisma.previewApplication.delete({ where: { id: preview.id } })
await removeBranchDatabase(database, pullmergeRequestId);
} }
} }
return {
message: 'PR closed. Thank you!'
};
// if (application?.connectedDatabase?.databaseId) {
// const databaseId = application.connectedDatabase.databaseId;
// const database = await prisma.database.findUnique({ where: { id: databaseId } });
// if (database) {
// await removeBranchDatabase(database, pullmergeRequestId);
// }
// }
} }
} }
} }
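The custom domain created for a preview deployment is the PR number prepended as a subdomain of the application's FQDN. A small illustration with hypothetical values; getDomain() from lib/common strips the protocol, approximated here with an inline replace:

const fqdn = 'https://app.example.com';
const pullmergeRequestId = '42';
const protocol = fqdn.includes('https://') ? 'https://' : 'http://';
const domain = fqdn.replace(/^https?:\/\//, ''); // what getDomain(fqdn) returns
const customDomain = `${protocol}${pullmergeRequestId}.${domain}`;
// -> 'https://42.app.example.com'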

View File

@@ -23,6 +23,7 @@ export interface GitHubEvents {
ref: string, ref: string,
repo: { repo: {
id: string, id: string,
full_name: string,
} }
} }
} }

View File

@@ -2,7 +2,7 @@ import axios from "axios";
import cuid from "cuid"; import cuid from "cuid";
import crypto from "crypto"; import crypto from "crypto";
import type { FastifyReply, FastifyRequest } from "fastify"; import type { FastifyReply, FastifyRequest } from "fastify";
import { errorHandler, getAPIUrl, getUIUrl, isDev, listSettings, prisma } from "../../../lib/common"; import { errorHandler, getAPIUrl, getDomain, getUIUrl, isDev, listSettings, prisma } from "../../../lib/common";
import { checkContainer, removeContainer } from "../../../lib/docker"; import { checkContainer, removeContainer } from "../../../lib/docker";
import { getApplicationFromDB, getApplicationFromDBWebhook } from "../../api/v1/applications/handlers"; import { getApplicationFromDB, getApplicationFromDBWebhook } from "../../api/v1/applications/handlers";
@@ -39,9 +39,7 @@ export async function configureGitLabApp(request: FastifyRequest<ConfigureGitLab
export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) { export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
const { object_kind: objectKind, ref, project_id } = request.body const { object_kind: objectKind, ref, project_id } = request.body
try { try {
const allowedActions = ['opened', 'reopen', 'close', 'open', 'update']; const allowedActions = ['opened', 'reopen', 'close', 'open', 'update'];
const webhookToken = request.headers['x-gitlab-token']; const webhookToken = request.headers['x-gitlab-token'];
if (!webhookToken && !isDev) { if (!webhookToken && !isDev) {
throw { status: 500, message: 'Invalid webhookToken.' } throw { status: 500, message: 'Invalid webhookToken.' }
@@ -91,8 +89,8 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
} }
} }
} else if (objectKind === 'merge_request') { } else if (objectKind === 'merge_request') {
const { object_attributes: { work_in_progress: isDraft, action, source_branch: sourceBranch, target_branch: targetBranch, iid: pullmergeRequestId }, project: { id } } = request.body const { object_attributes: { work_in_progress: isDraft, action, source_branch: sourceBranch, target_branch: targetBranch, source: { path_with_namespace: sourceRepository } }, project: { id } } = request.body
const pullmergeRequestId = request.body.object_attributes.iid.toString();
const projectId = Number(id); const projectId = Number(id);
if (!allowedActions.includes(action)) { if (!allowedActions.includes(action)) {
throw { status: 500, message: 'Action not allowed.' } throw { status: 500, message: 'Action not allowed.' }
@@ -100,14 +98,13 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
if (isDraft) { if (isDraft) {
throw { status: 500, message: 'Draft MR, do nothing.' } throw { status: 500, message: 'Draft MR, do nothing.' }
} }
const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch); const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch);
if (applicationsFound && applicationsFound.length > 0) { if (applicationsFound && applicationsFound.length > 0) {
for (const application of applicationsFound) { for (const application of applicationsFound) {
const buildId = cuid(); const buildId = cuid();
if (application.settings.previews) { if (application.settings.previews) {
if (application.destinationDockerId) { if (application.destinationDockerId) {
const isRunning = await checkContainer( const { found: isRunning } = await checkContainer(
{ {
dockerId: application.destinationDocker.id, dockerId: application.destinationDocker.id,
container: application.id container: application.id
@@ -130,10 +127,30 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
where: { id: application.id }, where: { id: application.id },
data: { updatedAt: new Date() } data: { updatedAt: new Date() }
}); });
let previewApplicationId = undefined
if (pullmergeRequestId) {
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
if (foundPreviewApplications.length > 0) {
previewApplicationId = foundPreviewApplications[0].id
} else {
const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://'
const previewApplication = await prisma.previewApplication.create({
data: {
pullmergeRequestId,
sourceBranch,
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
application: { connect: { id: application.id } }
}
})
previewApplicationId = previewApplication.id
}
}
await prisma.build.create({ await prisma.build.create({
data: { data: {
id: buildId, id: buildId,
pullmergeRequestId: pullmergeRequestId.toString(), pullmergeRequestId,
previewApplicationId,
sourceRepository,
sourceBranch, sourceBranch,
applicationId: application.id, applicationId: application.id,
destinationDockerId: application.destinationDocker.id, destinationDockerId: application.destinationDocker.id,
@@ -150,8 +167,19 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
} else if (action === 'close') { } else if (action === 'close') {
if (application.destinationDockerId) { if (application.destinationDockerId) {
const id = `${application.id}-${pullmergeRequestId}`; const id = `${application.id}-${pullmergeRequestId}`;
await removeContainer({ id, dockerId: application.destinationDocker.id }); try {
await removeContainer({ id, dockerId: application.destinationDocker.id });
} catch (error) { }
} }
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
if (foundPreviewApplications.length > 0) {
for (const preview of foundPreviewApplications) {
await prisma.previewApplication.delete({ where: { id: preview.id } })
}
}
return {
message: 'MR closed. Thank you!'
};
} }
} }

View File

@@ -8,6 +8,9 @@ export interface GitLabEvents {
Body: { Body: {
object_attributes: { object_attributes: {
work_in_progress: string work_in_progress: string
source: {
path_with_namespace: string
}
isDraft: string isDraft: string
action: string action: string
source_branch: string source_branch: string

View File

@@ -6,13 +6,13 @@ import { TraefikOtherConfiguration } from "./types";
import { OnlyId } from "../../../types"; import { OnlyId } from "../../../types";
function configureMiddleware( function configureMiddleware(
{ id, container, port, domain, nakedDomain, isHttps, isWWW, isDualCerts, scriptName, type }, { id, container, port, domain, nakedDomain, isHttps, isWWW, isDualCerts, scriptName, type, isCustomSSL },
traefik traefik
) { ) {
if (isHttps) { if (isHttps) {
traefik.http.routers[id] = { traefik.http.routers[id] = {
entrypoints: ['web'], entrypoints: ['web'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`, rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
middlewares: ['redirect-to-https'] middlewares: ['redirect-to-https']
}; };
@@ -53,9 +53,9 @@ function configureMiddleware(
if (isDualCerts) { if (isDualCerts) {
traefik.http.routers[`${id}-secure`] = { traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`, rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
tls: { tls: isCustomSSL ? true : {
certresolver: 'letsencrypt' certresolver: 'letsencrypt'
}, },
middlewares: [] middlewares: []
@@ -64,16 +64,16 @@ function configureMiddleware(
if (isWWW) { if (isWWW) {
traefik.http.routers[`${id}-secure-www`] = { traefik.http.routers[`${id}-secure-www`] = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`www.${nakedDomain}\`)`, rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
tls: { tls: isCustomSSL ? true : {
certresolver: 'letsencrypt' certresolver: 'letsencrypt'
}, },
middlewares: [] middlewares: []
}; };
traefik.http.routers[`${id}-secure`] = { traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`)`, rule: `Host(\`${nakedDomain}\`) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
tls: { tls: {
domains: { domains: {
@@ -86,7 +86,7 @@ function configureMiddleware(
} else { } else {
traefik.http.routers[`${id}-secure-www`] = { traefik.http.routers[`${id}-secure-www`] = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`www.${nakedDomain}\`)`, rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
tls: { tls: {
domains: { domains: {
@@ -97,9 +97,9 @@ function configureMiddleware(
}; };
traefik.http.routers[`${id}-secure`] = { traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`${domain}\`)`, rule: `Host(\`${domain}\`) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
tls: { tls: isCustomSSL ? true : {
certresolver: 'letsencrypt' certresolver: 'letsencrypt'
}, },
middlewares: [] middlewares: []
@@ -110,14 +110,14 @@ function configureMiddleware(
} else { } else {
traefik.http.routers[id] = { traefik.http.routers[id] = {
entrypoints: ['web'], entrypoints: ['web'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`, rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
middlewares: [] middlewares: []
}; };
traefik.http.routers[`${id}-secure`] = { traefik.http.routers[`${id}-secure`] = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`, rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
service: `${id}`, service: `${id}`,
tls: { tls: {
domains: { domains: {
@@ -178,7 +178,19 @@ function configureMiddleware(
export async function traefikConfiguration(request, reply) { export async function traefikConfiguration(request, reply) {
try { try {
const sslpath = '/etc/traefik/acme/custom';
const certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } } } } })
let parsedCertificates = []
for (const certificate of certificates) {
parsedCertificates.push({
certFile: `${sslpath}/${certificate.id}-cert.pem`,
keyFile: `${sslpath}/${certificate.id}-key.pem`
})
}
const traefik = { const traefik = {
tls: {
certificates: parsedCertificates
},
http: { http: {
routers: {}, routers: {},
services: {}, services: {},
@@ -222,13 +234,42 @@ export async function traefikConfiguration(request, reply) {
fqdn, fqdn,
id, id,
port, port,
buildPack,
dockerComposeConfiguration,
destinationDocker, destinationDocker,
destinationDockerId, destinationDockerId,
settings: { previews, dualCerts } settings: { previews, dualCerts, isCustomSSL }
} = application; } = application;
if (destinationDockerId) { if (destinationDockerId) {
const { network, id: dockerId } = destinationDocker; const { network, id: dockerId } = destinationDocker;
const isRunning = true; const isRunning = true;
if (buildPack === 'compose') {
const services = Object.entries(JSON.parse(dockerComposeConfiguration))
for (const service of services) {
const [key, value] = service
const { port: customPort, fqdn } = value
if (fqdn) {
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
data.applications.push({
id: `${id}-${key}`,
container: `${id}-${key}`,
port: customPort ? customPort : port || 3000,
domain,
nakedDomain,
isRunning,
isHttps,
isWWW,
isDualCerts: dualCerts,
isCustomSSL
});
}
}
continue;
}
if (fqdn) { if (fqdn) {
const domain = getDomain(fqdn); const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, ''); const nakedDomain = domain.replace(/^www\./, '');
@@ -244,7 +285,8 @@ export async function traefikConfiguration(request, reply) {
isRunning, isRunning,
isHttps, isHttps,
isWWW, isWWW,
isDualCerts: dualCerts isDualCerts: dualCerts,
isCustomSSL
}); });
} }
if (previews) { if (previews) {
@@ -267,7 +309,8 @@ export async function traefikConfiguration(request, reply) {
nakedDomain, nakedDomain,
isHttps, isHttps,
isWWW, isWWW,
isDualCerts: dualCerts isDualCerts: dualCerts,
isCustomSSL
}); });
} }
} }
@@ -534,7 +577,19 @@ export async function traefikOtherConfiguration(request: FastifyRequest<TraefikO
export async function remoteTraefikConfiguration(request: FastifyRequest<OnlyId>) { export async function remoteTraefikConfiguration(request: FastifyRequest<OnlyId>) {
const { id } = request.params const { id } = request.params
try { try {
const sslpath = '/etc/traefik/acme/custom';
const certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true } } } } })
let parsedCertificates = []
for (const certificate of certificates) {
parsedCertificates.push({
certFile: `${sslpath}/${certificate.id}-cert.pem`,
keyFile: `${sslpath}/${certificate.id}-key.pem`
})
}
const traefik = { const traefik = {
tls: {
certificates: parsedCertificates
},
http: { http: {
routers: {}, routers: {},
services: {}, services: {},
@@ -578,13 +633,41 @@ export async function remoteTraefikConfiguration(request: FastifyRequest<OnlyId>
fqdn, fqdn,
id, id,
port, port,
buildPack,
dockerComposeConfiguration,
destinationDocker, destinationDocker,
destinationDockerId, destinationDockerId,
settings: { previews, dualCerts } settings: { previews, dualCerts, isCustomSSL }
} = application; } = application;
if (destinationDockerId) { if (destinationDockerId) {
const { id: dockerId, network } = destinationDocker; const { id: dockerId, network } = destinationDocker;
const isRunning = true; const isRunning = true;
if (buildPack === 'compose') {
const services = Object.entries(JSON.parse(dockerComposeConfiguration))
for (const service of services) {
const [key, value] = service
const { port: customPort, fqdn } = value
if (fqdn) {
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
data.applications.push({
id: `${id}-${key}`,
container: `${id}-${key}`,
port: customPort ? customPort : port || 3000,
domain,
nakedDomain,
isRunning,
isHttps,
isWWW,
isDualCerts: dualCerts,
isCustomSSL
});
}
}
continue;
}
if (fqdn) { if (fqdn) {
const domain = getDomain(fqdn); const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, ''); const nakedDomain = domain.replace(/^www\./, '');
@@ -600,7 +683,8 @@ export async function remoteTraefikConfiguration(request: FastifyRequest<OnlyId>
isRunning, isRunning,
isHttps, isHttps,
isWWW, isWWW,
isDualCerts: dualCerts isDualCerts: dualCerts,
isCustomSSL
}); });
} }
if (previews) { if (previews) {
@@ -623,7 +707,8 @@ export async function remoteTraefikConfiguration(request: FastifyRequest<OnlyId>
nakedDomain, nakedDomain,
isHttps, isHttps,
isWWW, isWWW,
isDualCerts: dualCerts isDualCerts: dualCerts,
isCustomSSL
}); });
} }
} }
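The net effect on the generated dynamic configuration: uploaded certificates are listed under tls.certificates, and routers for applications with isCustomSSL set tls to true (letting Traefik pick a matching certificate from that list by SNI) instead of requesting one via the letsencrypt resolver. A rough sketch of the emitted shape, with hypothetical ids and domains:

const dynamicConfig = {
  tls: {
    certificates: [
      {
        certFile: '/etc/traefik/acme/custom/<certificate-id>-cert.pem',
        keyFile: '/etc/traefik/acme/custom/<certificate-id>-key.pem'
      }
    ]
  },
  http: {
    routers: {
      '<application-id>-secure': {
        entrypoints: ['websecure'],
        rule: '(Host(`example.com`) || Host(`www.example.com`)) && PathPrefix(`/`)',
        service: '<application-id>',
        tls: true, // custom SSL; otherwise { certresolver: 'letsencrypt' }
        middlewares: []
      }
    }
  }
};

The new compose branch reads dockerComposeConfiguration as a JSON object keyed by service name, with an optional fqdn and port per service, and emits one router per exposed service under an id of the form <application-id>-<service-name>.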

View File

@@ -1,38 +1,4 @@
export interface OnlyId { export interface OnlyId {
Params: { id: string }, Params: { id: string },
} }
export interface SaveVersion extends OnlyId {
Body: {
version: string
}
}
export interface SaveDatabaseDestination extends OnlyId {
Body: {
destinationId: string
}
}
export interface GetDatabaseLogs extends OnlyId {
Querystring: {
since: number
}
}
export interface SaveDatabase extends OnlyId {
Body: {
name: string,
defaultDatabase: string,
dbUser: string,
dbUserPassword: string,
rootUser: string,
rootUserPassword: string,
version: string,
isRunning: boolean
}
}
export interface SaveDatabaseSettings extends OnlyId {
Body: {
isPublic: boolean,
appendOnly: boolean
}
}

View File

@@ -46,8 +46,11 @@
"@tailwindcss/typography": "^0.5.7", "@tailwindcss/typography": "^0.5.7",
"cuid": "2.1.8", "cuid": "2.1.8",
"daisyui": "2.24.2", "daisyui": "2.24.2",
"dayjs": "1.11.5",
"js-cookie": "3.0.1", "js-cookie": "3.0.1",
"js-yaml": "4.1.0",
"p-limit": "4.0.0", "p-limit": "4.0.0",
"svelte-file-dropzone": "^1.0.0",
"svelte-select": "4.4.7", "svelte-select": "4.4.7",
"sveltekit-i18n": "2.2.2" "sveltekit-i18n": "2.2.2"
} }

View File

@@ -3,33 +3,35 @@ import Cookies from 'js-cookie';
export function getAPIUrl() { export function getAPIUrl() {
if (GITPOD_WORKSPACE_URL) { if (GITPOD_WORKSPACE_URL) {
const { href } = new URL(GITPOD_WORKSPACE_URL) const { href } = new URL(GITPOD_WORKSPACE_URL);
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '');
return newURL return newURL;
} }
if (CODESANDBOX_HOST) { if (CODESANDBOX_HOST) {
return `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}` return `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
} }
return dev ? 'http://localhost:3001' : 'http://localhost:3000'; return dev
? 'http://localhost:3001'
: 'http://localhost:3000';
} }
export function getWebhookUrl(type: string) { export function getWebhookUrl(type: string) {
if (GITPOD_WORKSPACE_URL) { if (GITPOD_WORKSPACE_URL) {
const { href } = new URL(GITPOD_WORKSPACE_URL) const { href } = new URL(GITPOD_WORKSPACE_URL);
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '') const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '');
if (type === 'github') { if (type === 'github') {
return `${newURL}/webhooks/github/events` return `${newURL}/webhooks/github/events`;
} }
if (type === 'gitlab') { if (type === 'gitlab') {
return `${newURL}/webhooks/gitlab/events` return `${newURL}/webhooks/gitlab/events`;
} }
} }
if (CODESANDBOX_HOST) { if (CODESANDBOX_HOST) {
const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}` const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
if (type === 'github') { if (type === 'github') {
return `${newURL}/webhooks/github/events` return `${newURL}/webhooks/github/events`;
} }
if (type === 'gitlab') { if (type === 'gitlab') {
return `${newURL}/webhooks/gitlab/events` return `${newURL}/webhooks/gitlab/events`;
} }
} }
return `https://webhook.site/0e5beb2c-4e9b-40e2-a89e-32295e570c21/events`; return `https://webhook.site/0e5beb2c-4e9b-40e2-a89e-32295e570c21/events`;
@@ -37,7 +39,7 @@ export function getWebhookUrl(type: string) {
async function send({ async function send({
method, method,
path, path,
data = {}, data = null,
headers, headers,
timeout = 120000 timeout = 120000
}: { }: {
@@ -51,7 +53,7 @@ async function send({
const controller = new AbortController(); const controller = new AbortController();
const id = setTimeout(() => controller.abort(), timeout); const id = setTimeout(() => controller.abort(), timeout);
const opts: any = { method, headers: {}, body: null, signal: controller.signal }; const opts: any = { method, headers: {}, body: null, signal: controller.signal };
if (Object.keys(data).length > 0) { if (data && Object.keys(data).length > 0) {
const parsedData = data; const parsedData = data;
for (const [key, value] of Object.entries(data)) { for (const [key, value] of Object.entries(data)) {
if (value === '') { if (value === '') {
@@ -83,7 +85,9 @@ async function send({
if (dev && !path.startsWith('https://')) { if (dev && !path.startsWith('https://')) {
path = `${getAPIUrl()}${path}`; path = `${getAPIUrl()}${path}`;
} }
if (method === 'POST' && data && !opts.body) {
opts.body = data;
}
const response = await fetch(`${path}`, opts); const response = await fetch(`${path}`, opts);
clearTimeout(id); clearTimeout(id);
@@ -103,7 +107,11 @@ async function send({
return {}; return {};
} }
if (!response.ok) { if (!response.ok) {
if (response.status === 401 && !path.startsWith('https://api.github') && !path.includes('/v4/user')) { if (
response.status === 401 &&
!path.startsWith('https://api.github') &&
!path.includes('/v4/')
) {
Cookies.remove('token'); Cookies.remove('token');
} }
@@ -126,7 +134,7 @@ export function del(
export function post( export function post(
path: string, path: string,
data: Record<string, unknown>, data: Record<string, unknown> | FormData,
headers?: Record<string, unknown> headers?: Record<string, unknown>
): Promise<Record<string, any>> { ): Promise<Record<string, any>> {
return send({ method: 'POST', path, data, headers }); return send({ method: 'POST', path, data, headers });
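With data now defaulting to null and the extra POST branch, the client helper transparently handles both JSON objects and FormData bodies. A short usage sketch (element ids and the second path are illustrative; /upload matches the certificate form added later in this diff):

import { post } from '$lib/api';

// Grab the File objects from two <input type="file"> elements.
const certFile = (document.getElementById('cert') as HTMLInputElement).files![0];
const keyFile = (document.getElementById('key') as HTMLInputElement).files![0];

// FormData has no enumerable own keys, so it skips the JSON-serialisation branch
// and is attached verbatim by the `method === 'POST' && data && !opts.body` check;
// the browser can then supply the multipart boundary (assuming no explicit Content-Type is set).
const form = new FormData();
form.append('cert', certFile);
form.append('key', keyFile);
await post('/upload', form);

// Plain objects still go out as JSON, unchanged.
await post('/settings/check', { fqdn: 'https://example.com' });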

View File

@@ -3,7 +3,7 @@ import { addToast } from '$lib/store';
export const asyncSleep = (delay: number) => export const asyncSleep = (delay: number) =>
new Promise((resolve) => setTimeout(resolve, delay)); new Promise((resolve) => setTimeout(resolve, delay));
export function errorNotification(error: any): void { export function errorNotification(error: any | { message: string }): void {
if (error.message) { if (error.message) {
if (error.message === 'Cannot read properties of undefined (reading \'postMessage\')') { if (error.message === 'Cannot read properties of undefined (reading \'postMessage\')') {
return addToast({ return addToast({
@@ -83,4 +83,8 @@ export function handlerNotFoundLoad(error: any, url: URL) {
status: 500, status: 500,
error: new Error(`Could not load ${url}`) error: new Error(`Could not load ${url}`)
}; };
}
export function getRndInteger(min: number, max: number) {
return Math.floor(Math.random() * (max - min + 1)) + min;
} }

View File

@@ -0,0 +1 @@
<span class="badge bg-coollabs-gradient rounded text-white font-normal"> BETA </span>

View File

@@ -13,8 +13,9 @@
export let id: string; export let id: string;
export let name: string; export let name: string;
export let placeholder = ''; export let placeholder = '';
export let inputStyle = '';
let disabledClass = 'bg-coolback disabled:bg-coolblack'; let disabledClass = 'bg-coolback disabled:bg-coolblack w-full';
let isHttps = browser && window.location.protocol === 'https:'; let isHttps = browser && window.location.protocol === 'https:';
function copyToClipboard() { function copyToClipboard() {
@@ -32,6 +33,7 @@
{#if !isPasswordField || showPassword} {#if !isPasswordField || showPassword}
{#if textarea} {#if textarea}
<textarea <textarea
style={inputStyle}
rows="5" rows="5"
class={disabledClass} class={disabledClass}
class:pr-10={true} class:pr-10={true}
@@ -47,6 +49,7 @@
> >
{:else} {:else}
<input <input
style={inputStyle}
class={disabledClass} class={disabledClass}
type="text" type="text"
class:pr-10={true} class:pr-10={true}
@@ -63,6 +66,7 @@
{/if} {/if}
{:else} {:else}
<input <input
style={inputStyle}
class={disabledClass} class={disabledClass}
class:pr-10={true} class:pr-10={true}
class:pr-20={value && isHttps} class:pr-20={value && isHttps}
@@ -78,7 +82,7 @@
/> />
{/if} {/if}
<div class="absolute top-0 right-0 m-3 cursor-pointer text-stone-600 hover:text-white"> <div class="absolute top-0 right-0 flex justify-center items-center h-full cursor-pointer text-stone-600 hover:text-white mr-3">
<div class="flex space-x-2"> <div class="flex space-x-2">
{#if isPasswordField} {#if isPasswordField}
<div on:click={() => (showPassword = !showPassword)}> <div on:click={() => (showPassword = !showPassword)}>

View File

@@ -10,23 +10,10 @@
.slice(-16); .slice(-16);
</script> </script>
<a {id} href={url} target="_blank" class="icons inline-block text-pink-500 cursor-pointer text-xs"> <a {id} href={url} target="_blank" class="icons inline-block cursor-pointer text-xs mx-2">
<svg <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
xmlns="http://www.w3.org/2000/svg" <path stroke-linecap="round" stroke-linejoin="round" d="M9.879 7.519c1.171-1.025 3.071-1.025 4.242 0 1.172 1.025 1.172 2.687 0 3.712-.203.179-.43.326-.67.442-.745.361-1.45.999-1.45 1.827v.75M21 12a9 9 0 11-18 0 9 9 0 0118 0zm-9 5.25h.008v.008H12v-.008z" />
class="w-6 h-6" </svg>
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
fill="none"
stroke-linecap="round"
stroke-linejoin="round"
>
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
<path
d="M6 4h11a2 2 0 0 1 2 2v12a2 2 0 0 1 -2 2h-11a1 1 0 0 1 -1 -1v-14a1 1 0 0 1 1 -1m3 0v18"
/>
<line x1="13" y1="8" x2="15" y2="8" />
<line x1="13" y1="12" x2="15" y2="12" />
</svg>
</a> </a>
<Tooltip triggeredBy={`#${id}`}>See details in the documentation</Tooltip> <Tooltip triggeredBy={`#${id}`}>See details in the documentation</Tooltip>

View File

@@ -1,26 +1,39 @@
<script lang="ts"> <script lang="ts">
import { onMount } from 'svelte'; // import { onMount } from 'svelte';
import Tooltip from './Tooltip.svelte'; // import Tooltip from './Tooltip.svelte';
export let explanation = ''; export let explanation = '';
let id: any; export let position = 'dropdown-right'
let self: any; // let id: any;
onMount(() => { // let self: any;
id = `info-${self.offsetLeft}-${self.offsetTop}`; // onMount(() => {
}); // id = `info-${self.offsetLeft}-${self.offsetTop}`;
// });
</script> </script>
<div {id} class="inline-block mx-2 text-pink-500 cursor-pointer" bind:this={self}> <div class={`dropdown dropdown-end ${position}`}>
<!-- svelte-ignore a11y-label-has-associated-control -->
<label tabindex="0" class="btn btn-circle btn-ghost btn-xs text-sky-500">
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" class="w-4 h-4 stroke-current"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"></path></svg>
</label>
<div tabindex="0" class="card compact dropdown-content shadow bg-coolgray-400 rounded w-64">
<div class="card-body">
<!-- <h2 class="card-title">You needed more info?</h2> -->
<p class="text-xs font-normal">{@html explanation}</p>
</div>
</div>
</div>
<!-- <div {id} class="inline-block mx-2 cursor-pointer" bind:this={self}>
<svg <svg
fill="none" fill="none"
height="18" height="14"
shape-rendering="geometricPrecision" shape-rendering="geometricPrecision"
stroke="currentColor" stroke="currentColor"
stroke-linecap="round" stroke-linecap="round"
stroke-linejoin="round" stroke-linejoin="round"
stroke-width="1.5" stroke-width="1.4"
viewBox="0 0 24 24" viewBox="0 0 24 24"
width="18" width="14"
><path d="M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z" /><path ><path d="M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z" /><path
d="M9.09 9a3 3 0 015.83 1c0 2-3 3-3 3" d="M9.09 9a3 3 0 015.83 1c0 2-3 3-3 3"
/><circle cx="12" cy="17" r=".5" /> /><circle cx="12" cy="17" r=".5" />
@@ -28,4 +41,4 @@
</div> </div>
{#if id} {#if id}
<Tooltip triggeredBy={`#${id}`}>{@html explanation}</Tooltip> <Tooltip triggeredBy={`#${id}`}>{@html explanation}</Tooltip>
{/if} {/if} -->

View File

@@ -1,10 +1,13 @@
<script lang="ts"> <script lang="ts">
import Beta from './Beta.svelte';
import Explaner from './Explainer.svelte'; import Explaner from './Explainer.svelte';
import Tooltip from './Tooltip.svelte'; import Tooltip from './Tooltip.svelte';
export let id: any; export let id: any;
export let customClass: any = null;
export let setting: any; export let setting: any;
export let title: any; export let title: any;
export let isBeta: any = false;
export let description: any; export let description: any;
export let isCenter = true; export let isCenter = true;
export let disabled = false; export let disabled = false;
@@ -15,12 +18,19 @@
<div class="flex items-center py-4 pr-8"> <div class="flex items-center py-4 pr-8">
<div class="flex w-96 flex-col"> <div class="flex w-96 flex-col">
<div class="text-xs font-bold text-stone-100 md:text-base"> <!-- svelte-ignore a11y-label-has-associated-control -->
{title}<Explaner explanation={description} /> <label>
</div> {title}
{#if isBeta}
<Beta />
{/if}
{#if description && description !== ''}
<Explaner explanation={description} />
{/if}
</label>
</div> </div>
</div> </div>
<div class:text-center={isCenter} class="flex justify-center"> <div class:text-center={isCenter} class={`flex justify-center ${customClass}`}>
<div <div
on:click on:click
aria-pressed="false" aria-pressed="false"

View File

@@ -2,6 +2,11 @@
import { createEventDispatcher } from 'svelte'; import { createEventDispatcher } from 'svelte';
const dispatch = createEventDispatcher(); const dispatch = createEventDispatcher();
export let type = 'info'; export let type = 'info';
function success() {
if (type === 'success') {
return 'bg-coollabs';
}
}
</script> </script>
<div <div
@@ -10,8 +15,7 @@
on:focus={() => dispatch('pause')} on:focus={() => dispatch('pause')}
on:mouseout={() => dispatch('resume')} on:mouseout={() => dispatch('resume')}
on:blur={() => dispatch('resume')} on:blur={() => dispatch('resume')}
class="alert shadow-lg text-white rounded hover:scale-105 transition-all duration-100 cursor-pointer" class={`flex flex-row alert shadow-lg text-white hover:scale-105 transition-all duration-100 cursor-pointer rounded ${success()}`}
class:bg-coollabs={type === 'success'}
class:alert-error={type === 'error'} class:alert-error={type === 'error'}
class:alert-info={type === 'info'} class:alert-info={type === 'info'}
> >

View File

@@ -1,5 +1,4 @@
<script lang="ts"> <script lang="ts">
import { fade } from 'svelte/transition';
import Toast from './Toast.svelte'; import Toast from './Toast.svelte';
import { dismissToast, pauseToast, resumeToast, toasts } from '$lib/store'; import { dismissToast, pauseToast, resumeToast, toasts } from '$lib/store';
@@ -7,9 +6,9 @@
{#if $toasts} {#if $toasts}
<section> <section>
<article class="toast toast-top toast-end rounded-none" role="alert" transition:fade> <article class="toast toast-top toast-end rounded-none px-10" role="alert" >
{#each $toasts as toast (toast.id)} {#each $toasts as toast (toast.id)}
<Toast <Toast
type={toast.type} type={toast.type}
on:resume={() => resumeToast(toast.id)} on:resume={() => resumeToast(toast.id)}
on:pause={() => pauseToast(toast.id)} on:pause={() => pauseToast(toast.id)}

View File

@@ -1,7 +1,7 @@
<script lang="ts"> <script lang="ts">
import { Tooltip } from 'flowbite-svelte'; import { Tooltip } from 'flowbite-svelte';
export let placement = 'bottom'; export let placement = 'bottom';
export let color = 'bg-coollabs text-left'; export let color = 'bg-coollabs font-thin text-left';
export let triggeredBy = '#tooltip-default'; export let triggeredBy = '#tooltip-default';
</script> </script>

View File

@@ -1,11 +1,11 @@
<script lang="ts"> <script lang="ts">
import { dev } from '$app/env'; import { dev } from '$app/env';
import { get, post } from '$lib/api'; import { get, post } from '$lib/api';
import { addToast, appSession, features } from '$lib/store'; import { addToast, appSession, features, updateLoading, isUpdateAvailable } from '$lib/store';
import { asyncSleep, errorNotification } from '$lib/common'; import { asyncSleep, errorNotification } from '$lib/common';
import { onMount } from 'svelte'; import { onMount } from 'svelte';
import Tooltip from './Tooltip.svelte';
let isUpdateAvailable = false;
let updateStatus: any = { let updateStatus: any = {
found: false, found: false,
loading: false, loading: false,
@@ -57,37 +57,41 @@
if ($appSession.userId) { if ($appSession.userId) {
const overrideVersion = $features.latestVersion; const overrideVersion = $features.latestVersion;
if ($appSession.teamId === '0') { if ($appSession.teamId === '0') {
if ($updateLoading === true) return;
try { try {
$updateLoading = true;
const data = await get(`/update`); const data = await get(`/update`);
if (overrideVersion || data?.isUpdateAvailable) { if (overrideVersion || data?.isUpdateAvailable) {
latestVersion = overrideVersion || data.latestVersion; latestVersion = overrideVersion || data.latestVersion;
if (overrideVersion) { if (overrideVersion) {
isUpdateAvailable = true; $isUpdateAvailable = true;
} else { } else {
isUpdateAvailable = data.isUpdateAvailable; $isUpdateAvailable = data.isUpdateAvailable;
} }
} }
} catch (error) { } catch (error) {
return errorNotification(error); return errorNotification(error);
} finally {
$updateLoading = false;
} }
} }
} }
}); });
</script> </script>
<div class="flex flex-col space-y-4 py-2"> <div class="py-0 lg:py-2">
{#if $appSession.teamId === '0'} {#if $appSession.teamId === '0'}
{#if isUpdateAvailable} {#if $isUpdateAvailable}
<button <button
id="update"
disabled={updateStatus.success === false} disabled={updateStatus.success === false}
on:click={update} on:click={update}
class="icons tooltip tooltip-right tooltip-primary bg-gradient-to-r from-purple-500 via-pink-500 to-red-500 text-white duration-75 hover:scale-105" class="icons bg-coollabs-gradient text-white duration-75 hover:scale-105 w-full"
data-tip="Update Available!"
> >
{#if updateStatus.loading} {#if updateStatus.loading}
<svg <svg
xmlns="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg"
class="lds-heart h-9 w-8" class="lds-heart h-8 w-8"
viewBox="0 0 24 24" viewBox="0 0 24 24"
stroke-width="1.5" stroke-width="1.5"
stroke="currentColor" stroke="currentColor"
@@ -101,24 +105,27 @@
/> />
</svg> </svg>
{:else if updateStatus.success === null} {:else if updateStatus.success === null}
<svg <div class="flex items-center justify-center space-x-2">
xmlns="http://www.w3.org/2000/svg" <svg
class="h-9 w-8" xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24" class="h-8 w-8"
stroke-width="1.5" viewBox="0 0 24 24"
stroke="currentColor" stroke-width="1.5"
fill="none" stroke="currentColor"
stroke-linecap="round" fill="none"
stroke-linejoin="round" stroke-linecap="round"
> stroke-linejoin="round"
<path stroke="none" d="M0 0h24v24H0z" fill="none" /> >
<circle cx="12" cy="12" r="9" /> <path stroke="none" d="M0 0h24v24H0z" fill="none" />
<line x1="12" y1="8" x2="8" y2="12" /> <circle cx="12" cy="12" r="9" />
<line x1="12" y1="8" x2="12" y2="16" /> <line x1="12" y1="8" x2="8" y2="12" />
<line x1="16" y1="12" x2="12" y2="8" /> <line x1="12" y1="8" x2="12" y2="16" />
</svg> <line x1="16" y1="12" x2="12" y2="8" />
</svg>
<span class="flex lg:hidden">Update available</span>
</div>
{:else if updateStatus.success} {:else if updateStatus.success}
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-9 w-8" <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-8 w-8"
><path ><path
fill="#DD2E44" fill="#DD2E44"
d="M11.626 7.488c-.112.112-.197.247-.268.395l-.008-.008L.134 33.141l.011.011c-.208.403.14 1.223.853 1.937.713.713 1.533 1.061 1.936.853l.01.01L28.21 24.735l-.008-.009c.147-.07.282-.155.395-.269 1.562-1.562-.971-6.627-5.656-11.313-4.687-4.686-9.752-7.218-11.315-5.656z" d="M11.626 7.488c-.112.112-.197.247-.268.395l-.008-.008L.134 33.141l.011.011c-.208.403.14 1.223.853 1.937.713.713 1.533 1.061 1.936.853l.01.01L28.21 24.735l-.008-.009c.147-.07.282-.155.395-.269 1.562-1.562-.971-6.627-5.656-11.313-4.687-4.686-9.752-7.218-11.315-5.656z"
@@ -183,6 +190,9 @@
> >
{/if} {/if}
</button> </button>
<Tooltip triggeredBy="#update" placement="right" color="bg-coolgray-200 text-white"
>New Version Available!</Tooltip
>
{/if} {/if}
{/if} {/if}
</div> </div>

View File

@@ -0,0 +1,20 @@
<script lang="ts">
import { post } from '$lib/api';
let cert: any;
let key: any;
async function submitForm() {
const formData = new FormData();
formData.append('cert', cert[0]);
formData.append('key', key[0]);
await post('/upload', formData);
}
</script>
<form on:submit|preventDefault={submitForm}>
<label for="cert">Certificate</label>
<input id="cert" type="file" required name="cert" bind:files={cert} />
<label for="key">Private Key</label>
<input id="key" type="file" required name="key" bind:files={key} />
<br />
<input type="submit" />
</form>

View File

@@ -1,4 +1,5 @@
<script lang="ts"> <script lang="ts">
export let server: any;
let usage = { let usage = {
cpu: { cpu: {
load: [0, 0, 0], load: [0, 0, 0],
@@ -26,10 +27,11 @@
import { onDestroy, onMount } from 'svelte'; import { onDestroy, onMount } from 'svelte';
import { get, post } from '$lib/api'; import { get, post } from '$lib/api';
import { errorNotification } from '$lib/common'; import { errorNotification } from '$lib/common';
import Beta from './Beta.svelte';
async function getStatus() { async function getStatus() {
if (loading.usage) return; if (loading.usage) return;
loading.usage = true; loading.usage = true;
const data = await get('/usage'); const data = await get(`/servers/usage/${server.id}?remoteEngine=${server.remoteEngine}`);
usage = data.usage; usage = data.usage;
loading.usage = false; loading.usage = false;
} }
@@ -52,7 +54,7 @@
async function manuallyCleanupStorage() { async function manuallyCleanupStorage() {
try { try {
loading.cleanup = true; loading.cleanup = true;
await post('/internal/cleanup', {}); await post('/internal/cleanup', { serverId: server.id });
return addToast({ return addToast({
message: 'Cleanup done.', message: 'Cleanup done.',
type: 'success' type: 'success'
@@ -65,38 +67,63 @@
} }
</script> </script>
<div class="w-full"> <div class="w-full relative p-5 ">
<div class="flex lg:flex-row flex-col gap-4"> {#if loading.usage}
<h1 class="title lg:text-3xl">Hardware Details</h1> <span class="indicator-item badge bg-yellow-500 badge-sm" />
<div class="flex lg:flex-row flex-col space-x-0 lg:space-x-2 space-y-2 lg:space-y-0"> {:else}
{#if $appSession.teamId === '0'} <span class="indicator-item badge bg-success badge-sm" />
<button on:click={manuallyCleanupStorage} class:loading={loading.cleanup} class="btn btn-sm" {/if}
>Cleanup Storage</button
> <div class="w-full flex flex-col lg:flex-row space-y-4 lg:space-y-0 space-x-4">
{/if} <div class="flex flex-col">
<h1 class="font-bold text-lg lg:text-xl truncate">
{server.name}
{#if server.remoteEngine}
<Beta />
{/if}
</h1>
<div class="text-xs">
{#if server?.remoteIpAddress}
<h2>{server?.remoteIpAddress}</h2>
{:else}
<h2>localhost</h2>
{/if}
</div>
</div> </div>
{#if $appSession.teamId === '0'}
<button
disabled={loading.cleanup}
on:click={manuallyCleanupStorage}
class:loading={loading.cleanup}
class:bg-coollabs={!loading.cleanup}
class="btn btn-sm">Cleanup Storage</button
>
{/if}
</div>
<div class="flex lg:flex-row flex-col gap-4">
<div class="flex lg:flex-row flex-col space-x-0 lg:space-x-2 space-y-2 lg:space-y-0" />
</div> </div>
<div class="divider" /> <div class="divider" />
<div class="grid grid-flow-col gap-4 grid-rows-3 justify-start lg:justify-center lg:grid-rows-1"> <div class="grid grid-flow-col gap-4 grid-rows-3 justify-start lg:justify-center lg:grid-rows-1">
<div class="stats stats-vertical min-w-[16rem] mb-5 rounded bg-transparent"> <div class="stats stats-vertical min-w-[16rem] mb-5 rounded bg-transparent">
<div class="stat"> <div class="stat">
<div class="stat-title">Total Memory</div> <div class="stat-title">Total Memory</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{(usage?.memory.totalMemMb).toFixed(0)}<span class="text-sm">MB</span> {(usage?.memory?.totalMemMb).toFixed(0)}<span class="text-sm">MB</span>
</div> </div>
</div> </div>
<div class="stat"> <div class="stat">
<div class="stat-title">Used Memory</div> <div class="stat-title">Used Memory</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{(usage?.memory.usedMemMb).toFixed(0)}<span class="text-sm">MB</span> {(usage?.memory?.usedMemMb).toFixed(0)}<span class="text-sm">MB</span>
</div> </div>
</div> </div>
<div class="stat"> <div class="stat">
<div class="stat-title">Free Memory</div> <div class="stat-title">Free Memory</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{usage?.memory.freeMemPercentage}<span class="text-sm">%</span> {(usage?.memory?.freeMemPercentage).toFixed(0)}<span class="text-sm">%</span>
</div> </div>
</div> </div>
</div> </div>
@@ -104,42 +131,42 @@
<div class="stats stats-vertical min-w-[20rem] mb-5 bg-transparent rounded"> <div class="stats stats-vertical min-w-[20rem] mb-5 bg-transparent rounded">
<div class="stat"> <div class="stat">
<div class="stat-title">Total CPU</div> <div class="stat-title">Total CPU</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{usage?.cpu.count} {usage?.cpu?.count}
</div> </div>
</div> </div>
<div class="stat"> <div class="stat">
<div class="stat-title">CPU Usage</div> <div class="stat-title">CPU Usage</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{usage?.cpu.usage}<span class="text-sm">%</span> {usage?.cpu?.usage}<span class="text-sm">%</span>
</div> </div>
</div> </div>
<div class="stat"> <div class="stat">
<div class="stat-title">Load Average (5,10,30mins)</div> <div class="stat-title">Load Average (5,10,30mins)</div>
<div class="stat-value text-2xl">{usage?.cpu.load}</div> <div class="stat-value text-2xl text-white">{usage?.cpu?.load}</div>
</div> </div>
</div> </div>
<div class="stats stats-vertical min-w-[16rem] mb-5 bg-transparent rounded"> <div class="stats stats-vertical min-w-[16rem] mb-5 bg-transparent rounded">
<div class="stat"> <div class="stat">
<div class="stat-title">Total Disk</div> <div class="stat-title">Total Disk</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{usage?.disk.totalGb}<span class="text-sm">GB</span> {usage?.disk?.totalGb}<span class="text-sm">GB</span>
</div> </div>
</div> </div>
<div class="stat"> <div class="stat">
<div class="stat-title">Used Disk</div> <div class="stat-title">Used Disk</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{usage?.disk.usedGb}<span class="text-sm">GB</span> {usage?.disk?.usedGb}<span class="text-sm">GB</span>
</div> </div>
</div> </div>
<div class="stat"> <div class="stat">
<div class="stat-title">Free Disk</div> <div class="stat-title">Free Disk</div>
<div class="stat-value text-2xl"> <div class="stat-value text-2xl text-white">
{usage?.disk.freePercentage}<span class="text-sm">%</span> {usage?.disk?.freePercentage}<span class="text-sm">%</span>
</div> </div>
</div> </div>
</div> </div>
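Note on the hunk above: the new template reads the usage payload with optional chaining (usage?.memory?.totalMemMb) but still calls .toFixed(0) on the result, so the expression can still throw while the stats have not loaded. A minimal TypeScript sketch of a tolerant formatter, assuming the payload shape implied by the template; the type and helper names below are illustrative, not part of the repository:

// Illustrative only: field names inferred from the template above.
type ServerUsage = {
	memory?: { totalMemMb?: number; usedMemMb?: number; freeMemPercentage?: number };
	cpu?: { count?: number; usage?: number; load?: number | string }; // exact load shape not shown in the diff
	disk?: { totalGb?: number; usedGb?: number; freePercentage?: number };
};

// Rounds a numeric stat and appends its unit, or returns a placeholder
// while the value is still undefined (e.g. before the first usage poll).
function formatStat(value: number | undefined, unit = ''): string {
	return typeof value === 'number' ? `${value.toFixed(0)}${unit}` : 'n/a';
}

// Stand-in for the real fetch; returns undefined to simulate "not loaded yet".
function loadUsage(): ServerUsage | undefined {
	return undefined;
}

const usage = loadUsage();
console.log(formatStat(usage?.memory?.totalMemMb, 'MB')); // "n/a" instead of a TypeError
console.log(formatStat(usage?.disk?.freePercentage, '%')); // "n/a"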

@@ -40,4 +40,6 @@
 	<Icons.Laravel {isAbsolute} />
 {:else if application.buildPack?.toLowerCase() === 'heroku'}
 	<Icons.Heroku {isAbsolute} />
+{:else if application.buildPack?.toLowerCase() === 'compose'}
+	<Icons.Compose {isAbsolute} />
 {/if}

@@ -0,0 +1,9 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<img
alt="docker compose logo"
class={isAbsolute ? 'w-16 h-16 absolute top-0 left-0 -m-8' : 'w-8 h-8 mx-auto'}
src="/docker-compose.png"
/>

@@ -17,3 +17,4 @@ export { default as Eleventy } from './Eleventy.svelte';
 export { default as Deno } from './Deno.svelte';
 export { default as Laravel } from './Laravel.svelte';
 export { default as Heroku } from './Heroku.svelte';
+export { default as Compose } from './Compose.svelte';
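The compose build pack icon is wired up in three places: the extra {:else if} branch in the build-pack switcher, the new Compose.svelte wrapper around /docker-compose.png, and the export added to this index. As the branch chain grows, a lookup table keyed by build pack name is one possible alternative; a rough TypeScript sketch, not code from the repository (in the real component the values would be the imported Svelte components, rendered with <svelte:component>):

// Illustrative alternative to the {:else if} chain; string names stand in
// for the imported icon components (Icons.Laravel, Icons.Heroku, Icons.Compose).
const buildPackIcons: Record<string, string> = {
	laravel: 'Laravel',
	heroku: 'Heroku',
	compose: 'Compose' // added in this change
};

// Mirrors application.buildPack?.toLowerCase() in the template; unknown
// build packs map to undefined, just as the chain renders nothing for them.
function iconForBuildPack(buildPack?: string): string | undefined {
	return buildPack ? buildPackIcons[buildPack.toLowerCase()] : undefined;
}

console.log(iconForBuildPack('Compose')); // "Compose"
console.log(iconForBuildPack('unknown-pack')); // undefined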

@@ -16,4 +16,6 @@
 	<Icons.Redis {isAbsolute} />
 {:else if type === 'couchdb'}
 	<Icons.CouchDB {isAbsolute} />
+{:else if type === 'edgedb'}
+	<Icons.EdgeDB {isAbsolute} />
 {/if}

@@ -0,0 +1,22 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<svg
class={isAbsolute ? 'absolute top-0 left-0 -m-8 h-16 w-16' : 'mx-auto w-12 h-12'}
width="88"
fill="#1F8AED"
height="101"
viewBox="0 -15 88 101"
><path
class="pageNav_logoBar__2v4ah"
style="transform-origin:center 35.5px"
fill-rule="evenodd"
clip-rule="evenodd"
d="M55.1436 71H58.1436V0H55.1436V71Z"
/><path
fill-rule="evenodd"
clip-rule="evenodd"
d="M74.5362 35.3047C74.5362 41.3776 72.1013 42.4662 69.3799 42.4662H63.5935V28.1432H69.3799C72.1013 28.1432 74.5362 29.2318 74.5362 35.3047V35.3047ZM71.5862 35.3047C71.5862 31.0651 70.2971 30.8646 68.4352 30.8646H66.6305V39.7448H68.4352C70.2971 39.7448 71.5862 39.5443 71.5862 35.3047V35.3047ZM40.9348 42.4662V28.1432H50.0442V30.8646H43.9713V33.7865H48.5546V36.4792H43.9713V39.7448H50.0442V42.4662H40.9348ZM80.6092 36.1068V39.7448H83.13C84.7055 39.7448 85.1066 38.7135 85.1066 37.9401C85.1066 37.3385 84.8201 36.1068 82.6717 36.1068H80.6092ZM80.6092 30.8646V33.5859H82.6717C83.8462 33.5859 84.5337 33.0703 84.5337 32.2109C84.5337 31.3516 83.8462 30.8646 82.6717 30.8646H80.6092ZM77.5732 28.1432H83.4169C86.482 28.1432 87.3987 30.2917 87.3987 31.8385C87.3987 33.2708 86.482 34.3021 85.8518 34.5885C87.6851 35.4766 88.0002 37.2813 88.0002 38.1979C88.0002 39.401 87.3987 42.4662 83.4169 42.4662H77.5732V28.1432ZM23.4899 35.3047C23.4899 41.3776 21.055 42.4662 18.3337 42.4662H12.5472V28.1432H18.3337C21.055 28.1432 23.4899 29.2318 23.4899 35.3047V35.3047ZM32.4272 39.8594C33.974 39.8594 34.7761 39.3438 35.0626 39V37.4245H32.599V34.9609H37.4975V40.6615C37.0678 41.3203 34.7188 42.6094 32.5704 42.6094C29.047 42.6094 26.0678 41.2344 26.0678 35.1615C26.0678 29.0885 29.0756 28 31.797 28C36.0652 28 37.1251 30.2344 37.4688 32.2109L34.948 32.7839C34.8048 31.8672 34.0027 30.7214 32.1694 30.7214C30.3074 30.7214 29.0183 30.9219 29.0183 35.1615C29.0183 39.401 30.3647 39.8594 32.4272 39.8594V39.8594ZM20.539 35.3047C20.539 31.0651 19.2499 30.8646 17.3879 30.8646H15.5833V39.7448H17.3879C19.2499 39.7448 20.539 39.5443 20.539 35.3047V35.3047ZM0 42.4662V28.1432H9.10938V30.8646H3.03646V33.7865H7.61979V36.4792H3.03646V39.7448H9.10938V42.4662H0Z"
/></svg
>

@@ -9,15 +9,8 @@
viewBox="0 0 309.88 252.72" viewBox="0 0 309.88 252.72"
class={isAbsolute ? 'absolute top-0 left-0 -m-5 h-12 w-12 ' : 'mx-auto w-8 h-8'} class={isAbsolute ? 'absolute top-0 left-0 -m-5 h-12 w-12 ' : 'mx-auto w-8 h-8'}
> >
<defs>
<style>
.cls-1 {
fill: #fff;
}
</style>
</defs>
<path <path
class="cls-1" fill="#fff"
d="M316,10.05a4.2,4.2,0,0,0-2.84-1c-2.84,0-6.5,1.92-8.46,3l-.79.4a26.81,26.81,0,0,1-10.57,2.66c-3.76.12-7,.34-11.22.77-25,2.58-36.15,21.74-46.89,40.27-5.84,10.08-11.88,20.5-20.16,28.57a55.71,55.71,0,0,1-5.46,4.63c-8.57,6.39-19.33,10.9-27.74,14.12-8.07,3.08-16.86,5.85-25.37,8.53-7.78,2.45-15.14,4.76-21.9,7.28-3.05,1.13-5.64,2-7.93,2.76-6.15,2-10.6,3.53-17.08,8-2.53,1.73-5.07,3.6-6.8,5a71.26,71.26,0,0,0-13.54,14.27A84.81,84.81,0,0,1,77.88,163c-1.36,1.34-3.8,2-7.43,2-4.27,0-9.43-.88-14.91-1.81s-11.46-2-16.46-2c-4.07,0-7.17.66-9.5,2,0,0-3.9,2.28-5.56,5.23l1.62.73a33.56,33.56,0,0,1,6.93,5,33.68,33.68,0,0,0,7.19,5.12A6.37,6.37,0,0,1,42,180.72c-.69,1-1.69,2.29-2.74,3.67-5.77,7.55-9.13,12.32-7.2,14.92a6,6,0,0,0,3,.68c12.59,0,19.34-3.27,27.9-7.41,2.47-1.2,5-2.44,8-3.7,5-2.17,10.38-5.63,16.08-9.29,7.55-4.85,15.36-9.87,22.92-12.3a62.3,62.3,0,0,1,19.23-2.7c8,0,16.42,1.07,24.54,2.11,6.06.78,12.32,1.58,18.47,2,2.39.14,4.6.21,6.76.21a78.48,78.48,0,0,0,8.61-.45l.68-.24c4.32-2.65,6.34-8.34,8.29-13.84,1.26-3.54,2.32-6.72,4-8.74a2.06,2.06,0,0,1,.33-.27.4.4,0,0,1,.49.08.25.25,0,0,1,0,.16c-1,21.51-9.67,35.16-18.42,47.3L177,199.14s8.18,0,12.84-1.8c17-5.08,29.84-16.28,39.18-34.14a144.39,144.39,0,0,0,6.16-14.09c.16-.4,1.64-1.14,1.49.93,0,.61-.08,1.29-.13,2h0c0,.42-.06.85-.08,1.28-.25,3-1,9.34-1,9.34l5.25-2.81c12.66-8,22.42-24.14,29.82-49.25,3.09-10.46,5.34-20.85,7.33-30,2.38-11,4.43-20.43,6.78-24.09,3.69-5.74,9.32-9.62,14.77-13.39.75-.51,1.49-1,2.22-1.54,6.86-4.81,13.67-10.36,15.16-20.71l0-.23C317.93,12.92,317,11,316,10.05Z" d="M316,10.05a4.2,4.2,0,0,0-2.84-1c-2.84,0-6.5,1.92-8.46,3l-.79.4a26.81,26.81,0,0,1-10.57,2.66c-3.76.12-7,.34-11.22.77-25,2.58-36.15,21.74-46.89,40.27-5.84,10.08-11.88,20.5-20.16,28.57a55.71,55.71,0,0,1-5.46,4.63c-8.57,6.39-19.33,10.9-27.74,14.12-8.07,3.08-16.86,5.85-25.37,8.53-7.78,2.45-15.14,4.76-21.9,7.28-3.05,1.13-5.64,2-7.93,2.76-6.15,2-10.6,3.53-17.08,8-2.53,1.73-5.07,3.6-6.8,5a71.26,71.26,0,0,0-13.54,14.27A84.81,84.81,0,0,1,77.88,163c-1.36,1.34-3.8,2-7.43,2-4.27,0-9.43-.88-14.91-1.81s-11.46-2-16.46-2c-4.07,0-7.17.66-9.5,2,0,0-3.9,2.28-5.56,5.23l1.62.73a33.56,33.56,0,0,1,6.93,5,33.68,33.68,0,0,0,7.19,5.12A6.37,6.37,0,0,1,42,180.72c-.69,1-1.69,2.29-2.74,3.67-5.77,7.55-9.13,12.32-7.2,14.92a6,6,0,0,0,3,.68c12.59,0,19.34-3.27,27.9-7.41,2.47-1.2,5-2.44,8-3.7,5-2.17,10.38-5.63,16.08-9.29,7.55-4.85,15.36-9.87,22.92-12.3a62.3,62.3,0,0,1,19.23-2.7c8,0,16.42,1.07,24.54,2.11,6.06.78,12.32,1.58,18.47,2,2.39.14,4.6.21,6.76.21a78.48,78.48,0,0,0,8.61-.45l.68-.24c4.32-2.65,6.34-8.34,8.29-13.84,1.26-3.54,2.32-6.72,4-8.74a2.06,2.06,0,0,1,.33-.27.4.4,0,0,1,.49.08.25.25,0,0,1,0,.16c-1,21.51-9.67,35.16-18.42,47.3L177,199.14s8.18,0,12.84-1.8c17-5.08,29.84-16.28,39.18-34.14a144.39,144.39,0,0,0,6.16-14.09c.16-.4,1.64-1.14,1.49.93,0,.61-.08,1.29-.13,2h0c0,.42-.06.85-.08,1.28-.25,3-1,9.34-1,9.34l5.25-2.81c12.66-8,22.42-24.14,29.82-49.25,3.09-10.46,5.34-20.85,7.33-30,2.38-11,4.43-20.43,6.78-24.09,3.69-5.74,9.32-9.62,14.77-13.39.75-.51,1.49-1,2.22-1.54,6.86-4.81,13.67-10.36,15.16-20.71l0-.23C317.93,12.92,317,11,316,10.05Z"
transform="translate(-7.45 -9.1)" transform="translate(-7.45 -9.1)"
/> />

@@ -6,5 +6,6 @@ export { default as MongoDB } from './MongoDB.svelte';
 export { default as MySQL } from './MySQL.svelte';
 export { default as PostgreSQL } from './PostgreSQL.svelte';
 export { default as Redis } from './Redis.svelte';
+export { default as EdgeDB } from './EdgeDB.svelte';

@@ -5,7 +5,7 @@
 <svg
 	viewBox="0 0 700 240"
 	xmlns="http://www.w3.org/2000/svg"
-	class={isAbsolute ? 'w-36 absolute top-0 left-0 -m-3 -mt-5' : 'w-28 h-28 mx-auto'}
+	class={isAbsolute ? 'w-36 absolute top-0 left-0 -m-3 -mt-5' : 'w-full h-10 mx-auto'}
 	><path fill="#FDBC3D" d="m90.694 107.498-.981.39-20.608 8.23 6.332 6.547z" /><path
 		fill="#8EC63F"
 		d="M61.139 77.914 46.632 93 56.9 103.547c8.649-7.169 17.832-10.502 18.653-10.789L61.139 77.914z"

@@ -0,0 +1,9 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<img
alt="grafana logo"
class={isAbsolute ? 'w-12 h-12 absolute top-0 left-0 -m-3 -mt-5' : 'w-8 h-8 mx-auto'}
src="/grafana.png"
/>

@@ -42,4 +42,8 @@
 	<Icons.Searxng {isAbsolute} />
 {:else if type === 'weblate'}
 	<Icons.Weblate {isAbsolute} />
+{:else if type === 'grafana'}
+	<Icons.Grafana {isAbsolute} />
+{:else if type === 'trilium'}
+	<Icons.Trilium {isAbsolute} />
 {/if}

@@ -0,0 +1,9 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<img
alt="trilium logo"
class={isAbsolute ? 'w-9 h-9 absolute top-3 left-0 -m-3 -mt-5' : 'w-8 h-8 mx-auto'}
src="/trilium.png"
/>

@@ -4,7 +4,7 @@
 <svg
 	xmlns="http://www.w3.org/2000/svg"
-	class={isAbsolute ? 'w-16 h-16 absolute top-0 left-0 -m-7' : 'w-12 h-12 mx-auto'}
+	class={isAbsolute ? 'w-16 h-16 absolute top-0 left-0 -m-7' : 'w-10 h-10 mx-auto'}
 	version="1.1"
 	viewBox="0 0 300 300"
 	><linearGradient

@@ -17,4 +17,6 @@ export { default as Appwrite } from './Appwrite.svelte';
 export { default as Moodle } from './Moodle.svelte';
 export { default as GlitchTip } from './GlitchTip.svelte';
 export { default as Searxng } from './Searxng.svelte';
 export { default as Weblate } from './Weblate.svelte';
+export { default as Grafana } from './Grafana.svelte';
+export { default as Trilium } from './Trilium.svelte'
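The three new image-based icons added in this diff (Compose, Grafana, Trilium) are wrappers around a single <img> tag that differ only in src, alt text and size classes. A small TypeScript table, with values copied from the components above, shows how a shared wrapper could be parameterised; this is an illustrative sketch, not code from the repository:

// Values copied verbatim from Compose.svelte, Grafana.svelte and Trilium.svelte above.
interface ImgIcon {
	src: string;
	alt: string;
	absoluteClass: string; // used when isAbsolute is true
	inlineClass: string; // used otherwise
}

const imgIcons: Record<string, ImgIcon> = {
	compose: {
		src: '/docker-compose.png',
		alt: 'docker compose logo',
		absoluteClass: 'w-16 h-16 absolute top-0 left-0 -m-8',
		inlineClass: 'w-8 h-8 mx-auto'
	},
	grafana: {
		src: '/grafana.png',
		alt: 'grafana logo',
		absoluteClass: 'w-12 h-12 absolute top-0 left-0 -m-3 -mt-5',
		inlineClass: 'w-8 h-8 mx-auto'
	},
	trilium: {
		src: '/trilium.png',
		alt: 'trilium logo',
		absoluteClass: 'w-9 h-9 absolute top-3 left-0 -m-3 -mt-5',
		inlineClass: 'w-8 h-8 mx-auto'
	}
};

// Class string a shared wrapper would pass to its <img> element.
function imgIconClass(name: keyof typeof imgIcons, isAbsolute: boolean): string {
	const icon = imgIcons[name];
	return isAbsolute ? icon.absoluteClass : icon.inlineClass;
}

console.log(imgIconClass('grafana', false)); // "w-8 h-8 mx-auto"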

Some files were not shown because too many files have changed in this diff.