Compare commits

946 Commits
v3.10.4 ... v3

Author SHA1 Message Date
Andras Bacsai
b80a519b80 updates 2023-09-05 11:31:55 +02:00
Andras Bacsai
2fa7ffc931 updates 2023-09-05 11:25:16 +02:00
Andras Bacsai
4abec14a21 updates 2023-09-05 11:24:42 +02:00
Andras Bacsai
18d0623011 force volume prune 2023-08-16 15:27:10 +02:00
Andras Bacsai
aa634c78d1 fix seed js 2023-08-16 15:26:36 +02:00
Andras Bacsai
a2d4373104 cleanup volumes as well 2023-08-16 15:26:33 +02:00
Andras Bacsai
702e16d643 rename rollback to upgrade 2023-08-14 09:23:33 +02:00
Andras Bacsai
3b25c8f96b fix: docker compose env file 2023-08-12 00:10:14 +02:00
Andras Bacsai
1c8c567791 fix: env variables in compose deployments 2023-07-27 12:40:58 +02:00
Andras Bacsai
807a3c9d66 fix: n8n double mount
version++
2023-07-26 10:38:36 +02:00
Andras Bacsai
2abd7bd7bb copy to persistent storage 2023-07-25 12:38:36 +02:00
Andras Bacsai
343957ab8b update backups 2023-07-25 12:33:05 +02:00
Andras Bacsai
49261308f7 update webhook 2023-07-25 12:00:31 +02:00
Andras Bacsai
d037409237 updates 2023-07-25 11:40:27 +02:00
Andras Bacsai
338cbf62a1 fix: encrypt decrypt 2023-07-25 10:48:31 +02:00
Andras Bacsai
4c51bffc7b save db backup on seed 2023-07-20 21:41:47 +02:00
Andras Bacsai
fd98ba8812 auto update every hour 2023-07-20 16:37:24 +02:00
Andras Bacsai
930251e9c8 autoupdate fixed 2023-07-20 16:19:54 +02:00
Andras Bacsai
7cd441266a remove console log 2023-07-20 14:52:32 +02:00
Andras Bacsai
990fb8ec15 fix 2023-07-20 14:52:16 +02:00
Andras Bacsai
3fe982b2f4 Merge pull request #1052 from f-kawamura/bugfix-http-git-source
[Bug] Added support for HTTP source URLs in Git source
2023-07-20 13:48:34 +02:00
Andras Bacsai
9dd874e959 Merge pull request #1147 from martijnmichel/v3
Update serviceFields.ts
2023-07-20 13:46:52 +02:00
Andras Bacsai
b91368223b update plausible docs 2023-07-20 13:39:29 +02:00
Andras Bacsai
139670372b updates for templates 2023-07-20 13:29:03 +02:00
Andras Bacsai
1c0769ad75 update tags + only download on service view 2023-07-20 13:12:54 +02:00
Andras Bacsai
e6cbcf98cb fix: cleanup plausible 2023-07-20 13:06:19 +02:00
martijnmichel
64b0481055 Update serviceFields.ts 2023-07-20 08:19:51 +02:00
Andras Bacsai
ce15161926 Merge pull request #1144 from coollabsio/feature/implement-basic-auth-handling
fix: traefik config + ui + api
2023-07-18 15:37:57 +02:00
Andras Bacsai
4003d4d894 Merge pull request #1071 from pascal-klesse/feature/implement-basic-auth-handling
feat: Implement basic auth for applications
2023-07-18 15:37:35 +02:00
Andras Bacsai
6e011025a7 fix: traefik config + ui + api 2023-07-18 15:34:05 +02:00
Andras Bacsai
6c0544adb2 Merge branch 'v2' into feature/implement-basic-auth-handling 2023-07-18 14:48:02 +02:00
Andras Bacsai
8e4f7c9065 remove console.log 2023-07-18 14:44:46 +02:00
Andras Bacsai
e71f890b54 Merge pull request #1084 from Geczy/main
add trycatch
2023-07-18 14:44:32 +02:00
Andras Bacsai
4dc35dea97 Merge pull request #1119 from jenishngl/patch-1
Fixing the help link - source.svelte line 263
2023-07-18 14:40:33 +02:00
Andras Bacsai
b63dfb4bcd feat: backup databases 2023-07-18 14:36:54 +02:00
Andras Bacsai
b2ffd9183b fix: set connection string on publicity change 2023-07-18 13:01:10 +02:00
Andras Bacsai
5cb0bcfd9b fix: increase query time for new services etc 2023-07-18 09:50:05 +02:00
Jenish J
1fbcfcaf74 Merge branch 'v3' into patch-1 2023-07-17 20:15:27 +05:30
Andras Bacsai
3ba44a1e23 backup db file 2023-07-17 15:17:18 +02:00
Andras Bacsai
de4efbb555 update GH actions 2023-07-17 14:36:33 +02:00
Andras Bacsai
6f443680f3 typo 2023-07-17 14:02:51 +02:00
Andras Bacsai
23b22e5ca8 wip: reencrypt everything 2023-07-17 13:50:26 +02:00
Andras Bacsai
1bba747ce5 update 2023-07-15 09:50:29 +02:00
Andras Bacsai
9ebfc6646e wip 2023-07-14 23:15:43 +02:00
Andras Bacsai
055ff6dbbd updates 2023-07-14 22:30:40 +02:00
Andras Bacsai
6430e7b288 fix seed 2023-07-14 22:19:52 +02:00
Andras Bacsai
87b0050161 update seed 2023-07-14 22:11:59 +02:00
Andras Bacsai
369d8b408d version ++ 2023-07-14 21:36:28 +02:00
Andras Bacsai
505abc592c fix: gh actions 2023-07-14 21:33:30 +02:00
Andras Bacsai
c9df812258 testing seeder 2023-07-14 21:30:08 +02:00
Andras Bacsai
0bfcf6b66f fix: gh actions 2023-07-14 21:06:26 +02:00
Andras Bacsai
67853acabd docs link update 2023-07-14 20:58:54 +02:00
Jenish J
79c98657b1 Update source.svelte line 263 - Correcting the help link
Updated to the correct help link, as the old link was not pointing to the correct section on the docs.coollabs.io page
2023-06-28 20:45:03 +05:30
Geczy
d1be7e44af add trycatch 2023-05-25 13:39:57 -04:00
Andras Bacsai
33b853b981 Merge pull request #1081 from coollabsio/next
v3.12.32
2023-05-25 09:14:17 +02:00
Andras Bacsai
e6063fb93a fix: force delete stuck destinations 2023-05-24 21:55:24 +02:00
Andras Bacsai
f30f23af59 fix: restart storage volumes 2023-05-24 20:51:33 +02:00
Andras Bacsai
d4798a3b22 fix: more aggressive cleanup 2023-05-24 20:34:40 +02:00
Pascal Klesse
eefc2a3d0e remove TODO 2023-05-15 09:33:03 +02:00
Pascal Klesse
d14ca724e9 feat: Implement basic auth for applications 2023-05-15 09:27:49 +02:00
f-kawamura
7b05aaffc3 fix: Added support for HTTP source URLs in Git source. Currently only support HTTPS 2023-04-24 16:12:47 +09:00
Andras Bacsai
f3beb5d8db Merge pull request #1045 from coollabsio/next
v3.12.31
2023-04-19 08:53:20 +02:00
Andras Bacsai
e86b916415 fix: application logs duplicate 2023-04-19 08:50:55 +02:00
Andras Bacsai
e14cc6f2f0 remove assignment for issues 2023-04-18 14:52:26 +02:00
Andras Bacsai
8c1eb94401 fix: remove git is not necessary for docker bp 2023-04-18 14:51:31 +02:00
Andras Bacsai
29fa421945 feat: add custom version/tag 2023-04-18 14:46:40 +02:00
Andras Bacsai
7cfe98d988 fix: fail build if no application found. 2023-04-18 14:32:29 +02:00
Andras Bacsai
e2314c350b update lock file 2023-04-18 14:27:54 +02:00
Andras Bacsai
3713b33578 Update templates
fix: non-string inputs in templates
2023-04-18 14:22:46 +02:00
Andras Bacsai
e007a773fd Merge pull request #1030 from coollabsio/next
v3.12.30
2023-04-03 10:40:27 +02:00
Andras Bacsai
e2821118eb fix: removing git 2023-04-03 10:21:08 +02:00
Andras Bacsai
4c8e73ac86 fix: harder to remove destinations and sources 2023-04-03 09:55:13 +02:00
Andras Bacsai
cb980fb814 fix: docker compose generator 2023-04-03 08:59:48 +02:00
Andras Bacsai
41c84e3642 Merge pull request #1001 from coollabsio/next
v3.12.29
2023-03-20 13:57:01 +01:00
Andras Bacsai
2bad98424f switch back to aarch-runners 2023-03-20 13:49:41 +01:00
Andras Bacsai
bc6b1e2dea fix: remove .git dir from final image 2023-03-20 13:05:53 +01:00
Andras Bacsai
911c15d1be update versions 2023-03-20 12:44:45 +01:00
Andras Bacsai
f79d570870 fix: gitea 2023-03-20 12:28:23 +01:00
Andras Bacsai
7fffa9fba5 Merge branch 'main' into next 2023-03-20 12:05:21 +01:00
Andras Bacsai
cbd634fb99 Update README.md 2023-03-17 15:31:00 +01:00
Andras Bacsai
7ae7436d4f Update staging-release.yml 2023-03-17 15:27:16 +01:00
Andras Bacsai
641bada100 ignore dockerhub releases 2023-03-16 13:54:58 +01:00
Andras Bacsai
3416d8d88e only arm 2023-03-16 13:42:19 +01:00
Andras Bacsai
0bb503368b concurrency 2023-03-16 13:37:49 +01:00
Andras Bacsai
ac3a77c3c7 no qemu 2023-03-16 13:35:04 +01:00
Andras Bacsai
79b4178d76 vcpu increase 2023-03-16 13:32:48 +01:00
Andras Bacsai
42a61296d7 test buildjet 2023-03-16 13:29:13 +01:00
Andras Bacsai
e8088e2a70 Merge pull request #993 from coollabsio/next
fix: revert from dockerhub if ghcr.io does not exist
2023-03-16 13:10:58 +01:00
Andras Bacsai
c4d39aced2 fix: revert from dockerhub if ghcr.io does not exist 2023-03-16 13:10:34 +01:00
Andras Bacsai
b40a5adeb0 update GH actions 2023-03-16 12:28:40 +01:00
Andras Bacsai
558a900620 Merge pull request #992 from coollabsio/next
Move to ghcr.io
2023-03-16 12:18:57 +01:00
Andras Bacsai
6b5e5a504d updates 2023-03-16 12:09:48 +01:00
Andras Bacsai
e44dca2464 updates 2023-03-16 12:01:57 +01:00
Andras Bacsai
e1f84b277a updates 2023-03-16 11:57:04 +01:00
Andras Bacsai
2518f46b08 remove fluentbit + pocketbase builds 2023-03-16 11:56:19 +01:00
Andras Bacsai
01e18a9496 Merge pull request #991 from coollabsio/ghcr
Move to ghcr from dockerhub
2023-03-16 10:55:22 +01:00
Andras Bacsai
564ca709d3 updates 2023-03-16 10:53:54 +01:00
Andras Bacsai
a54a36ae18 updates 2023-03-16 10:50:26 +01:00
Andras Bacsai
43603b0961 update 2023-03-16 10:26:20 +01:00
Andras Bacsai
96cd99f904 fixes 2023-03-16 10:23:14 +01:00
Andras Bacsai
3438d10e25 test 2023-03-16 10:13:44 +01:00
Andras Bacsai
022ccb42a1 test 2023-03-16 10:04:53 +01:00
Andras Bacsai
e6d72e9f87 test 2023-03-16 09:56:39 +01:00
Andras Bacsai
06e8a6af23 test 2023-03-16 09:38:20 +01:00
Andras Bacsai
ac188d137a test 2023-03-16 09:32:20 +01:00
Andras Bacsai
cae466745a test 2023-03-16 09:10:11 +01:00
Andras Bacsai
d61f16dab0 test 2023-03-16 08:48:37 +01:00
Andras Bacsai
02ba277a86 fix: show ip address as host in public dbs 2023-03-07 13:25:08 +01:00
Andras Bacsai
470ff49a02 Merge pull request #981 from coollabsio/next
v3.12.26
2023-03-07 12:19:36 +01:00
Andras Bacsai
04d741581d Merge pull request #980 from hyenabyte/main
Fixing multiple remotes breaking the server overview
2023-03-07 12:12:59 +01:00
Andras Bacsai
038f210148 Merge branch 'main' into next 2023-03-07 11:47:29 +01:00
Andras Bacsai
2adad3a7bd fix: handle log format volumes 2023-03-07 11:46:23 +01:00
Andras Bacsai
05fb26a49b remove console logs 2023-03-07 11:15:43 +01:00
Andras Bacsai
1c237affb4 feat: add host path to any container 2023-03-07 11:15:05 +01:00
Andras Bacsai
3e81d7e9cb fix: replace . & .. & $PWD with ~ 2023-03-07 10:44:53 +01:00
David Koch Gregersen
edb66620c1 Adding a check when reading ssh config file
Also adds comments to the createRemoteEngineConfiguration function
2023-03-07 10:43:34 +01:00
Andras Bacsai
04f7e8e777 fix: host volumes 2023-03-07 10:31:10 +01:00
David Koch Gregersen
eee201013c Fixing multiple remotes breaking the server overview 2023-03-06 22:31:01 +01:00
Andras Bacsai
1190cb4ea1 Update deployApplication.ts 2023-03-04 18:49:34 +01:00
Andras Bacsai
507100ea0b Update package.json 2023-03-04 18:48:38 +01:00
Andras Bacsai
9b13912b6d Update common.ts 2023-03-04 18:48:28 +01:00
Andras Bacsai
ee65deebfd fix: nestjs buildpack 2023-03-04 18:05:01 +01:00
Andras Bacsai
ba9fa442d1 Merge pull request #973 from coollabsio/next
v3.12.23
2023-03-04 15:11:42 +01:00
Andras Bacsai
87da27f9bf fix: publishDirectory 2023-03-04 15:06:35 +01:00
Andras Bacsai
b5bc5fe2c6 Merge pull request #965 from coollabsio/next
v3.12.22
2023-03-03 09:13:56 +01:00
Andras Bacsai
d2329360d0 Merge pull request #950 from hemangjoshi37a/main
added `star-history`
2023-03-02 17:33:22 +01:00
Andras Bacsai
7ece0ae10a Merge pull request #955 from eltociear/patch-1
fix typo in _GitlabRepositories.svelte
2023-03-02 17:32:26 +01:00
Andras Bacsai
f931b47eb8 Merge pull request #957 from addianto/fix/pack
Fix PACK_VERSION build argument in Dockerfile
2023-03-02 17:31:55 +01:00
Andras Bacsai
7f7eb12ded fix: empty port in docker compose 2023-03-02 17:22:49 +01:00
Andras Bacsai
c0940f7a19 fix: cannot delete resource when you are not on root team 2023-03-02 17:12:29 +01:00
Andras Bacsai
9dfde11e35 possible fix: vaultwarden 2023-03-02 16:56:44 +01:00
Andras Bacsai
6f15cc2dbc fix: base directory not found 2023-03-02 16:52:55 +01:00
Daya Adianto
120308638f fix: set PACK_VERSION to 0.27.0
This commit removes the `v` prefix from the version identifier assigned to the
PACK_VERSION build argument. The `pack` script is actually available on
Coolify's CDN, but it is named without the `v` prefix in the script's version identifier.

Related issue: #689, which reported that the `pack` script in the
container image is an HTML 404 file instead of the actual `pack`
executable.
2023-02-25 17:27:41 +07:00
Ikko Eltociear Ashimine
1d04ef99bb fix typo in _GitlabRepositories.svelte
occured -> occurred
2023-02-24 11:24:55 +09:00
Hemang Joshi
9b00d177ef added star-history
added `star-history`
2023-02-22 16:21:32 +05:30
Andras Bacsai
884524c448 Merge pull request #945 from coollabsio/next
v3.12.21
2023-02-21 13:55:29 +01:00
Andras Bacsai
3ae1e7e87d remove debug 2023-02-21 13:47:28 +01:00
Andras Bacsai
81f885311d debug 2023-02-21 13:24:23 +01:00
Andras Bacsai
d9362f09d8 debug 2023-02-21 13:23:34 +01:00
Andras Bacsai
906d181d1b debug 2023-02-21 13:15:17 +01:00
Andras Bacsai
44b8812a7b debug 2023-02-21 13:08:14 +01:00
Andras Bacsai
3308c45e88 Merge pull request #943 from coollabsio/next
v3.12.21
2023-02-21 13:02:39 +01:00
Andras Bacsai
e530ecf9f9 fix 2023-02-21 12:59:21 +01:00
Andras Bacsai
51b5edb04f hmm fix 2023-02-21 12:48:06 +01:00
Andras Bacsai
f0d89f850e fix 2023-02-21 12:45:22 +01:00
Andras Bacsai
b777e08542 fix: arm services 2023-02-21 12:35:20 +01:00
Andras Bacsai
2e485df530 Merge pull request #935 from coollabsio/next
v3.12.20
2023-02-20 12:03:57 +01:00
Andras Bacsai
3c37d22a6e typo 2023-02-20 11:55:08 +01:00
Andras Bacsai
08ab7a504a fix: applications cannot be deleted 2023-02-20 11:54:43 +01:00
Andras Bacsai
06563ef921 add latest tag with prod release 2023-02-20 10:23:31 +01:00
Andras Bacsai
34f6210bc0 Merge pull request #931 from coollabsio/next
v3.12.19
2023-02-20 10:04:37 +01:00
Andras Bacsai
0bbde0c605 add ccareer logo 2023-02-20 09:40:54 +01:00
Andras Bacsai
a8f24fd1b7 update icon 2023-02-20 09:11:05 +01:00
Andras Bacsai
c3e0237696 Merge pull request #909 from scshiv29-dev/main
changed copypassword fields in databases
2023-02-20 09:06:35 +01:00
Andras Bacsai
bb6925920f fix: escape new line chars in wp custom configs 2023-02-17 15:00:29 +01:00
Andras Bacsai
63ec2a33ae fix: network in compose files 2023-02-17 14:45:13 +01:00
Andras Bacsai
c89a959fe8 remove debug 2023-02-17 14:35:42 +01:00
Andras Bacsai
150b50e0ba isarm simplification 2023-02-17 14:20:17 +01:00
Andras Bacsai
4ef824f665 typo fix 2023-02-17 14:16:12 +01:00
Andras Bacsai
5a56cca0aa debug a thing 2023-02-17 14:09:15 +01:00
Andras Bacsai
b9189d7647 readd compose icon 2023-02-17 13:34:21 +01:00
Andras Bacsai
20226c914b update templates 2023-02-17 13:29:14 +01:00
Andras Bacsai
434e7f8a09 fix versions 2023-02-17 13:11:50 +01:00
Andras Bacsai
a29d733a02 Merge pull request #924 from jloewe/main
fix: gitlab personal repo listing
2023-02-17 13:06:46 +01:00
Andras Bacsai
9abe4b967b package updates + remove local icons 2023-02-17 13:05:18 +01:00
Andras Bacsai
3b6a4ece0f reset production release 2023-02-17 13:00:31 +01:00
Andras Bacsai
28d2471b4d dump pocketbase version 2023-02-17 13:00:31 +01:00
Andras Bacsai
d122af9fed Merge pull request #891 from TetrisIQ/moveIconsInComunityRepo
Move icons in community repo
2023-02-17 13:00:02 +01:00
Andras Bacsai
77271f3856 Merge pull request #919 from rawalplawit/docfix
fix: typos in docs
2023-02-17 12:47:17 +01:00
Andras Bacsai
ededfb68a6 Merge pull request #926 from simonorzel26/main
Spelling correction README.md
2023-02-17 12:45:10 +01:00
Andras Bacsai
4a3affdd24 Merge pull request #930 from usr3/patch-1
fix: link 404 in contribution.md
2023-02-17 12:44:46 +01:00
Andras Bacsai
8f8ea120d3 test prod release 2023-02-17 12:15:17 +01:00
Andras Bacsai
0fa88009f8 remove rc gh action 2023-02-17 11:56:10 +01:00
Andras Bacsai
4375a807df remove trpc 2023-02-17 11:55:21 +01:00
usr3
b2d97c5908 Update link 2023-02-17 13:36:14 +05:30
usr3
ec89dd606d Fix link 404
Fix 404 for `Setup Docker Compose Plugin`
2023-02-17 13:09:16 +05:30
Simon O
198508a7c3 Update README.md 2023-02-15 17:58:14 +01:00
Jan Loewe
4845e986bb fix gitlab personal repo listing 2023-02-14 17:33:50 +00:00
Shivam Deepak Chaudhary
1da8a307fc fixed copyvolumefield css 2023-02-11 09:32:04 +00:00
Shivam Deepak Chaudhary
b4886e604e added copyvolumefield 2023-02-11 09:24:08 +00:00
rawalplawit
e84544136e fix: typos in docs 2023-02-10 14:54:16 +05:45
Shivam Deepak Chaudhary
ce70252a69 changed copypassword fields in databases 2023-02-09 07:51:33 +00:00
Andras Bacsai
5c56962ea1 Merge pull request #888 from Rados51/883
Fixes coollabsio/coolify#883
2023-02-08 15:00:57 +01:00
Andras Bacsai
d2ed53b946 Merge pull request #889 from Rados51/884
Fixes coollabsio/coolify#884
2023-02-08 14:59:24 +01:00
Andras Bacsai
a4da80b498 Merge pull request #894 from m0ddixx/main
feat: Add environment variables for proxy ports
2023-02-08 14:57:22 +01:00
Andras Bacsai
9bd01492b1 Merge pull request #896 from inluxc/PocketBase-v0.12.0
Update PocketBase to v0.12.2
2023-02-08 14:54:22 +01:00
Fulvio Carvalhido
da032941b4 Update version 0.12.2 2023-02-07 12:31:44 +00:00
Fulvio Carvalhido
c138fcc2e2 Update PocketBase to v0.12.0
Release:
https://github.com/pocketbase/pocketbase/releases/tag/v0.12.0
2023-01-30 13:18:03 +00:00
Nico Kranz
cbb69b0350 add env support for traefik ports 2023-01-30 11:04:22 +00:00
Alex
a8aed3354d fix: url 2023-01-28 21:02:16 +00:00
Alex
e8790a4d4c feat: remove svg support 2023-01-28 21:00:57 +00:00
Radoš
df6ef3aaa0 Fixes coollabsio/coolify#884 2023-01-27 18:52:23 +01:00
Radoš
2820d99f7b Fixes coollabsio/coolify#883 2023-01-27 18:35:34 +01:00
Alex
077aa4445a feat: github raw icon url 2023-01-24 20:04:43 +00:00
Andras Bacsai
23bfc119d9 Fix GH 2023-01-24 15:43:43 +01:00
Andras Bacsai
ab712ac637 version++ 2023-01-24 15:32:32 +01:00
Andras Bacsai
b056826e94 Fixing prod release 2023-01-24 15:31:26 +01:00
Andras Bacsai
6311627899 Merge pull request #882 from coollabsio/next
v3.12.18
2023-01-24 15:09:51 +01:00
Andras Bacsai
37cea5fb61 update mattermost + traefik 2023-01-24 14:48:03 +01:00
Andras Bacsai
655a8cd60d feat: able to use $$ in traefik config gen
fix: repman icon
2023-01-24 13:52:39 +01:00
Andras Bacsai
4c8babc96a version++ 2023-01-23 10:45:19 +01:00
Andras Bacsai
612bacebed fix: cleanupStuckedContainers 2023-01-23 10:37:28 +01:00
Andras Bacsai
ade7c8566d fix: cleanupStuckedContainers 2023-01-23 10:37:14 +01:00
Andras Bacsai
19553ce5c8 Merge pull request #874 from coollabsio/next
v3.12.17
2023-01-20 21:14:06 +01:00
Andras Bacsai
18ed2527e8 fix 2023-01-20 20:51:37 +01:00
Andras Bacsai
b0652bc884 Merge pull request #872 from coollabsio/next
Next
2023-01-20 14:07:46 +01:00
Andras Bacsai
15c9ad23fe fix: stuck containers 2023-01-20 14:06:55 +01:00
Andras Bacsai
578bb12562 test new release gh action 2023-01-20 14:05:07 +01:00
Andras Bacsai
f82cfda07f version++ 2023-01-20 13:55:05 +01:00
Andras Bacsai
9e52b2788d Pocketbase GH release updated 2023-01-20 13:49:39 +01:00
Andras Bacsai
2e56a113d9 Test GH release thing 2023-01-20 13:48:57 +01:00
Andras Bacsai
4722d777e6 Merge pull request #871 from coollabsio/next
v3.12.15
2023-01-20 13:22:11 +01:00
Andras Bacsai
2141d54ae0 fix 2023-01-20 13:15:52 +01:00
Andras Bacsai
e346225136 fix 2023-01-20 13:10:40 +01:00
Andras Bacsai
012d4dae56 testing 2023-01-20 11:15:38 +01:00
Andras Bacsai
b4d9fe70af fix 2023-01-20 10:43:21 +01:00
Andras Bacsai
85e83b5441 Test new gh actions 2023-01-20 10:42:13 +01:00
Andras Bacsai
6b2a453b8f fix: deletion + cleanupStuckedContainers 2023-01-20 10:10:36 +01:00
Andras Bacsai
27021538d8 fix: cleanup stuck containers 2023-01-20 09:40:29 +01:00
Andras Bacsai
8b57a2b055 fix: cleanup function 2023-01-20 09:26:48 +01:00
Andras Bacsai
75dd894685 Merge pull request #867 from coollabsio/next
v3.12.14
2023-01-19 14:36:05 +01:00
Andras Bacsai
9101ef8774 version++ 2023-01-19 14:33:33 +01:00
Andras Bacsai
5932540630 fix: www redirect 2023-01-19 14:33:20 +01:00
Andras Bacsai
ec376b2e47 Merge pull request #864 from coollabsio/next
v3.12.13
2023-01-18 19:00:03 +01:00
Andras Bacsai
a176562ad0 fix: secrets 2023-01-18 18:51:03 +01:00
Andras Bacsai
becf37b676 Merge pull request #858 from coollabsio/next
v3.12.12
2023-01-17 12:33:51 +01:00
Andras Bacsai
9b5efab8f8 fix: grpc 2023-01-17 11:51:53 +01:00
Andras Bacsai
e98a8ba599 traefik dashboard in dev 2023-01-17 11:12:52 +01:00
Andras Bacsai
7ddac50008 feat: http + h2c parallel 2023-01-17 11:12:42 +01:00
Andras Bacsai
9837ae359f feat: init h2c (http2/grpc) support 2023-01-17 10:35:04 +01:00
Andras Bacsai
710a829dcb version++ 2023-01-17 10:00:50 +01:00
Andras Bacsai
ccd84fa454 fix: build args docker compose 2023-01-17 10:00:27 +01:00
Andras Bacsai
335b36d3a9 Merge pull request #857 from zek/patch-2
Fix docker-compose build args
2023-01-17 09:26:12 +01:00
Talha Zekeriya Durmuş
2be30fae00 Handle string build parameter 2023-01-17 02:11:06 +01:00
Talha Zekeriya Durmuş
db5cd21884 Fix docker-compose build args 2023-01-17 02:04:01 +01:00
Andras Bacsai
bfd3020031 Merge pull request #853 from coollabsio/next
v3.12.11
2023-01-16 12:44:04 +01:00
Andras Bacsai
344c36997a fix: public gh repo reload compose 2023-01-16 12:36:59 +01:00
Andras Bacsai
dfd9272b70 version ++ 2023-01-16 12:17:48 +01:00
Andras Bacsai
359f4520f5 test template + tags during dev 2023-01-16 11:45:45 +01:00
Andras Bacsai
aecf014f4e Merge pull request #843 from zek/soketi-logo
Add Soketi Logo
2023-01-16 11:10:48 +01:00
Andras Bacsai
d2a89ddf84 Merge pull request #844 from zek/repman-logo
Add Repman logo
2023-01-16 11:10:35 +01:00
Andras Bacsai
c01fe153ae Merge pull request #847 from zek/mattermost
Add Mattermost Logo
2023-01-16 11:10:14 +01:00
Andras Bacsai
4f4a838799 update templates+tags 2023-01-16 10:58:44 +01:00
Andras Bacsai
ac6f2567eb fix: build env variables with docker compose 2023-01-16 10:42:23 +01:00
Andras Bacsai
05a5816ac6 fix: do not cleanup compose applications as unconfigured 2023-01-16 10:22:14 +01:00
Andras Bacsai
9c8f6e9195 fix: delete apps with previews 2023-01-16 10:16:49 +01:00
Andras Bacsai
2fd001f6d2 fix: docker log sequence 2023-01-16 10:06:41 +01:00
Andras Bacsai
d641d32413 fix: compose file location 2023-01-16 09:48:15 +01:00
Andras Bacsai
18064ef6a2 fixes related to docker-compose 2023-01-16 09:44:08 +01:00
Andras Bacsai
5cb9216add wip: trpc 2023-01-13 15:50:20 +01:00
Andras Bacsai
91c36dc810 wip: trpc 2023-01-13 15:24:43 +01:00
Andras Bacsai
6efb02fa32 wip: trpc 2023-01-13 15:21:54 +01:00
Andras Bacsai
97313e4180 wip: trpc 2023-01-13 14:54:21 +01:00
Andras Bacsai
568ab24fd9 wip: trpc 2023-01-13 14:17:36 +01:00
Talha Zekeriya Durmuş
5a745efcd3 Add Mattermost Logo 2023-01-13 02:38:33 +01:00
Andras Bacsai
c651570e62 wip: trpc 2023-01-12 16:50:17 +01:00
Andras Bacsai
8980598085 wip: trpc 2023-01-12 16:43:41 +01:00
Talha Zekeriya Durmuş
c07c742feb Add Repman logo 2023-01-12 01:45:13 +01:00
Talha Zekeriya Durmuş
1053abb9a9 Add Soketi Logo 2023-01-12 01:41:35 +01:00
Andras Bacsai
2c9e57cbb1 Merge pull request #841 from coollabsio/next
v3.12.10
2023-01-11 11:44:22 +01:00
Andras Bacsai
c6eaa2c8a6 update packages in api 2023-01-11 11:35:57 +01:00
Andras Bacsai
5ab5e913ee Merge pull request #840 from zek/patch-1
Fix: add missing variables
2023-01-11 11:33:17 +01:00
Talha Zekeriya Durmuş
cea53ca476 Fix: add missing variables 2023-01-11 11:12:44 +01:00
Andras Bacsai
58af09114b Merge pull request #834 from coollabsio/next
v3.12.9
2023-01-11 11:00:56 +01:00
Andras Bacsai
c4c0417e2d new pocketbase 2023-01-11 10:55:55 +01:00
Andras Bacsai
74f90e6947 Merge pull request #838 from zek/patch-1
Add Build Time Secrets for Laravel
2023-01-11 10:12:59 +01:00
Andras Bacsai
ad5c339780 fix 2023-01-11 10:11:32 +01:00
Andras Bacsai
305823db00 fix: secrets 2023-01-11 09:29:59 +01:00
Talha Zekeriya Durmuş
baf58b298f Add Build Time Secrets 2023-01-11 01:43:43 +01:00
Andras Bacsai
c37367d018 add directus 2023-01-10 15:30:10 +01:00
Andras Bacsai
1c98796e64 new templates + tags + dev mode updated 2023-01-10 13:24:04 +01:00
Andras Bacsai
e686d9a6ea add lock file 2023-01-10 13:01:37 +01:00
Andras Bacsai
a1936b9d59 update jsonwebtoken 2023-01-10 13:01:03 +01:00
Andras Bacsai
834f9c9337 template updates 2023-01-10 13:01:03 +01:00
Andras Bacsai
615f8cfd3b feat: handle invite_only plausible analytics 2023-01-10 13:01:03 +01:00
Andras Bacsai
8ed134105f remove console.log 2023-01-10 13:01:03 +01:00
Andras Bacsai
5d6169b270 Merge pull request #781 from kaname-png/libretranslate
feat(ui): add libretranslate service icon
2023-01-10 12:57:05 +01:00
Andras Bacsai
e83de8b938 fix: local images for reverting 2023-01-10 12:24:22 +01:00
Andras Bacsai
ee55e039b2 Merge pull request #798 from hyddeos/main
fix the console error on Documentation hover
2023-01-10 11:52:57 +01:00
Andras Bacsai
086dd89144 fix: temporary disable dns check with dns servers 2023-01-10 11:50:41 +01:00
Andras Bacsai
68e5d4dd2c fix: doc link 2023-01-10 11:35:10 +01:00
Andras Bacsai
55a35c6bec fix: remove prefetches 2023-01-10 11:31:44 +01:00
Andras Bacsai
d09b4885fe Merge pull request #784 from hyddeos/tool-tip
Change color for Tooltip on hover
2023-01-10 11:29:54 +01:00
Andras Bacsai
a46773e6d8 Merge branch 'next' into tool-tip 2023-01-10 11:29:36 +01:00
Andras Bacsai
a422d0220c fix: add documentation link again 2023-01-10 11:27:43 +01:00
Andras Bacsai
e5eba8430a Merge pull request #783 from hyddeos/doc-in-mob-menu
Add link to the Documentation in the mobile menu
2023-01-10 11:26:42 +01:00
Andras Bacsai
3d235dc316 Merge pull request #794 from TetrisIQ/main
feat: adding icon for whoogle
2023-01-10 11:19:42 +01:00
Andras Bacsai
80d3b4be8c Merge pull request #825 from MrSquaare/feature/openblocks-service
feat: add Openblocks icon
2023-01-10 11:19:13 +01:00
Andras Bacsai
fe8b7480df Merge pull request #836 from coollabsio/feat/git-source-custom-user
fix: custom gitlab git user
2023-01-10 11:17:42 +01:00
Andras Bacsai
cebfc3aaa0 Merge pull request #804 from titouanmathis/feat/git-source-custom-user
feat(git-source): Add support for custom SSH user for GitLab self-hosted
2023-01-10 11:17:01 +01:00
Andras Bacsai
f778b5a12d fix: custom gitlab git user 2023-01-10 11:15:21 +01:00
Andras Bacsai
2244050160 Merge pull request #816 from Yarmeli/main
[Bug] Fixed issue with docker-compose not loading for Gitlab instances
2023-01-10 10:56:03 +01:00
Andras Bacsai
9284e42b62 fix: $ sign in secrets 2023-01-10 10:52:40 +01:00
Andras Bacsai
ee40120496 fix: read-only iam 2023-01-10 10:26:11 +01:00
Andras Bacsai
30cd2149ea fix: read-only permission 2023-01-10 10:15:03 +01:00
Andras Bacsai
395df36d57 chore: version++ 2023-01-10 09:57:27 +01:00
Andras Bacsai
79597ea0e5 fix: parsing secrets 2023-01-10 09:57:01 +01:00
Guillaume Bonnet
283f39270a feat: add Openblocks icon 2023-01-05 12:26:50 +00:00
Andras Bacsai
7d892bb19d esbuild 2022-12-29 22:33:31 +01:00
Yarmeli
a025f124f3 Updated index.svelte with the same changes from +page.svelte 2022-12-29 19:00:11 +00:00
Yarmeli
84f7287bf8 Fixed an issue where the docker compose file could not be found 2022-12-29 18:54:54 +00:00
Andras Bacsai
a58544b502 Merge pull request #813 from coollabsio/next
v3.12.8
2022-12-27 21:10:41 +01:00
Andras Bacsai
4d26175ebe omg, what have I done.. 2022-12-27 21:02:04 +01:00
Andras Bacsai
78f0e6ff6b Update package.json 2022-12-27 14:22:49 +01:00
Andras Bacsai
3af97af634 Update common.ts 2022-12-27 14:22:29 +01:00
Andras Bacsai
2c2663c8a4 Update common.ts 2022-12-27 14:20:19 +01:00
Andras Bacsai
1122b8a2f7 Update index.ts 2022-12-27 13:48:19 +01:00
Andras Bacsai
5b9f38948b Update index.ts 2022-12-27 13:46:33 +01:00
Andras Bacsai
507eb3b424 Update common.ts 2022-12-27 13:40:00 +01:00
Andras Bacsai
56fbc0ed6c Update package.json 2022-12-27 13:39:38 +01:00
Andras Bacsai
7aaad314e3 Update common.ts 2022-12-27 13:39:03 +01:00
Andras Bacsai
356949dd54 Merge pull request #811 from coollabsio/next
v3.12.5
2022-12-26 21:51:09 +01:00
Andras Bacsai
9878baca53 Update index.ts 2022-12-26 21:29:54 +01:00
Andras Bacsai
9cbc7c2939 Merge pull request #809 from Tiagofv/main
Fix bug: value.environment is not iterable
2022-12-26 21:19:11 +01:00
Andras Bacsai
4680b63911 fix: cleanupstorage 2022-12-26 21:17:53 +01:00
Tiago Braga
ce4a2d95f2 fix: remove unused imports 2022-12-24 16:28:02 -03:00
Tiago Braga
b2e048de8d Fix: conditional on environment 2022-12-24 16:27:10 -03:00
Andras Bacsai
d25a9d7515 devtemplates update 2022-12-22 11:31:46 +01:00
Andras Bacsai
dc130d3705 new pocketbase version 2022-12-22 11:22:37 +01:00
Titouan Mathis
2391850218 Add support for custom SSH user for GitLab self-hosted 2022-12-21 15:10:51 +01:00
Andras Bacsai
c8f7ca920e wip: trpc 2022-12-21 15:06:33 +01:00
Andras Bacsai
e3e39af6fb remove console.log 2022-12-21 14:11:07 +01:00
Andras Bacsai
f38114f5a5 Merge pull request #802 from coollabsio/next
v3.12.4
2022-12-21 13:25:46 +01:00
Andras Bacsai
1ee9d041df fix: duplicate env variables 2022-12-21 13:24:30 +01:00
Andras Bacsai
9c6f412f04 wip: trpc 2022-12-21 13:06:44 +01:00
Andras Bacsai
4fa0f2d04a fix: gh actions 2022-12-21 12:47:42 +01:00
Andras Bacsai
e566a66ea4 test 2022-12-21 12:33:27 +01:00
Andras Bacsai
58a42abc67 test 2022-12-21 11:40:47 +01:00
Andras Bacsai
5676bd9d0d test 2022-12-21 11:24:19 +01:00
Andras Bacsai
9691010e7b test 2022-12-21 11:11:55 +01:00
Andras Bacsai
d19be3ad52 Merge pull request #801 from coollabsio/next
v3.12.3
2022-12-21 10:53:09 +01:00
Andras Bacsai
ec3cbf788b fix: secrets 2022-12-21 10:40:27 +01:00
Andras Bacsai
1282fd0b76 fix: secrets 2022-12-21 10:11:03 +01:00
Andras Bacsai
93430e5607 fix: add default node_env variable 2022-12-19 23:07:01 +01:00
Andras Bacsai
14201f4052 fix: add default node_env variable 2022-12-19 22:15:00 +01:00
Andras Bacsai
47979bf16d fix: secrets 2022-12-19 22:11:21 +01:00
Andras Bacsai
29530f3b17 fix: secrets with newline 2022-12-19 21:48:31 +01:00
Eric
af548e6ef8 Change link "rel" to "external"
To prevent a hover console error like the one on the documentation icon in desktop mode
2022-12-19 20:33:22 +01:00
hyddeos
ed24a9c990 fix the console error on documentation hover 2022-12-19 20:02:24 +01:00
Andras Bacsai
cb1d86d08b Merge pull request #796 from coollabsio/next
v3.12.2
2022-12-19 11:59:03 +01:00
Andras Bacsai
88f3f628ef fix: docker buildpack env 2022-12-19 11:51:44 +01:00
Andras Bacsai
295bea37bc fix: envs 2022-12-19 11:01:29 +01:00
Andras Bacsai
bd7d756254 fix: escape env vars 2022-12-19 10:22:11 +01:00
Andras Bacsai
4261147fe8 fix: escape secrets 2022-12-19 10:04:28 +01:00
Andras Bacsai
a70adc5eb3 fix: root user for dbs on arm 2022-12-19 09:52:50 +01:00
Alex
0d51b04d79 feat: adding icon for whoogle 2022-12-18 14:13:49 +01:00
Andras Bacsai
06d40b8a81 debug secret problem 2022-12-14 12:34:13 +01:00
Andras Bacsai
2358510cba Update --bug-report.yaml 2022-12-13 21:30:52 +01:00
Andras Bacsai
e6d13cb7d7 Update --bug-report.yaml 2022-12-13 21:29:42 +01:00
Andras Bacsai
39e21c3f36 chore: version++ 2022-12-13 13:32:50 +01:00
Andras Bacsai
8da900ee72 fix: do not replace secret 2022-12-13 13:32:11 +01:00
Andras Bacsai
9f4e81a1a3 wip: trpc 2022-12-13 13:22:45 +01:00
Andras Bacsai
0b918c2f51 wip: trpc 2022-12-13 13:17:32 +01:00
Andras Bacsai
085cd2a314 wip: trpc 2022-12-13 13:15:23 +01:00
Andras Bacsai
98d2399568 wip: trpc 2022-12-13 13:13:28 +01:00
Andras Bacsai
515d9a0008 wip: trpc 2022-12-13 13:11:49 +01:00
Andras Bacsai
aece1fa7d3 wip: trpc 2022-12-13 13:04:47 +01:00
Andras Bacsai
abc614ecfd wip: trpc 2022-12-13 12:54:57 +01:00
Andras Bacsai
1180d3fdde wip: trpc 2022-12-13 12:47:14 +01:00
Andras Bacsai
1639d1725a Merge branch 'main' into next 2022-12-13 09:35:33 +01:00
Andras Bacsai
5df1deecbc update templates and tags 2022-12-13 09:34:42 +01:00
Andras Bacsai
fe3c0cf76e fix: appwrite tmp volume 2022-12-13 09:17:33 +01:00
Andras Bacsai
cc0df0182c Merge pull request #785 from jugglingjsons/main
fix: adding missing appwrite volume
2022-12-13 09:15:11 +01:00
Andras Bacsai
eb354f639f chore: version++ 2022-12-13 09:05:11 +01:00
jugglingjsons
02c530dcbe fix: adding missing appwrite volume 2022-12-12 19:11:40 +01:00
hyddeos
379b1de64f change color for Tooltip on hover 2022-12-12 18:24:13 +01:00
hyddeos
f3ff324925 fixed size on icon 2022-12-12 17:50:35 +01:00
hyddeos
0f2160222f Add Documents link to Mobile-Menu 2022-12-12 17:45:04 +01:00
hyddeos
ce3750c51c Add link document to Mobile-menu 2022-12-12 17:38:45 +01:00
Kaname
72a7ea6e91 feat(ui): add libretranslate service icon 2022-12-12 15:48:23 +00:00
Andras Bacsai
4ad7e1f8e6 wip 2022-12-12 16:04:41 +01:00
Andras Bacsai
2007ba0c3b fix: build commands 2022-12-12 14:52:56 +01:00
Andras Bacsai
2009dc11db wip trpc 2022-12-12 14:48:56 +01:00
Andras Bacsai
62f2196a0c Merge branch 'next' into trpc 2022-12-12 09:38:38 +01:00
Andras Bacsai
e63c65da4f Merge pull request #775 from hyddeos/main
Add link to the documentation
2022-12-12 09:11:18 +01:00
Andras Bacsai
570a082227 Merge pull request #776 from rickaard/close-sidedrawer
Close the sidedrawer when clicking a link in mobile view
2022-12-12 09:10:31 +01:00
Andras Bacsai
9b1ede3a59 fix: migration file 2022-12-12 09:08:00 +01:00
Andras Bacsai
c445fc0f8a wip 2022-12-12 08:44:23 +01:00
Rickard Jonsson
699493cf24 Make sure sidedrawer is closed on link click 2022-12-11 20:59:02 +01:00
hyddeos
6c89686f31 Add link to the documentation 2022-12-11 20:32:15 +01:00
Andras Bacsai
f55b861849 fix: cleanup 2022-12-09 14:32:22 +01:00
Andras Bacsai
adf82c04ad Merge pull request #747 from coollabsio/next
v3.12.0
2022-12-09 14:29:33 +01:00
Andras Bacsai
1b80956fe8 fix: public db icon on dashboard 2022-12-09 14:08:21 +01:00
Andras Bacsai
de9da8caf9 fix 2022-12-09 13:59:43 +01:00
Andras Bacsai
967f42dd89 add shell to some cmds 2022-12-09 13:46:06 +01:00
Andras Bacsai
95e8b29fa2 fix: wrong port in case of docker compose 2022-12-09 11:21:25 +01:00
Andras Bacsai
2e3c815e53 fix: delete resource on dashboard 2022-12-07 15:27:26 +01:00
Andras Bacsai
132707caa7 fix: rde 2022-12-07 14:46:12 +01:00
Andras Bacsai
0dad616c38 fixes 2022-12-07 13:45:56 +01:00
Andras Bacsai
c0882dffde Merge pull request #766 from twisttaan/feat/name-label
feat(api): name label
2022-12-07 13:45:11 +01:00
Andras Bacsai
5e082c647c fixes 2022-12-07 12:17:06 +01:00
Tristan Camejo
285c3c2f5d feat(api): name label 2022-12-07 00:38:08 +00:00
Andras Bacsai
dcb29a80fe fix 2022-12-06 10:29:14 +01:00
Andras Bacsai
b45ad19732 fix: security hole 2022-12-06 10:27:51 +01:00
Andras Bacsai
f12d453b5f backups... backups everywhere 2022-12-02 14:34:06 +01:00
Andras Bacsai
8a00b711be add pocketbase 2022-12-02 10:00:27 +01:00
Andras Bacsai
56204efc7a update workflow 2022-12-02 09:44:29 +01:00
Andras Bacsai
da638c270f infra: pocketbase release 2022-12-02 09:41:22 +01:00
Andras Bacsai
ad4b974274 fix: turn off autodeploy for simpledockerfiles 2022-12-01 16:50:54 +01:00
Andras Bacsai
943a05edcc fixes 2022-12-01 16:29:38 +01:00
Andras Bacsai
1a28e65e50 feat: revert to remote image 2022-12-01 15:51:18 +01:00
Andras Bacsai
cd3af7fa39 fix: failed builds should not push images 2022-12-01 15:05:21 +01:00
Andras Bacsai
8ccb0c88db feat: able to push image to docker registry 2022-12-01 14:39:02 +01:00
Andras Bacsai
127880cf8d schema prettify 2022-12-01 13:29:45 +01:00
Andras Bacsai
2e56086661 feat: simpleDockerfile deployment 2022-12-01 12:58:45 +01:00
Andras Bacsai
a129be0dbd fixes 2022-12-01 10:23:43 +01:00
Andras Bacsai
12c0760cb3 fixes 2022-12-01 09:51:56 +01:00
Andras Bacsai
9d3ed85ffd haha 2022-11-30 15:50:45 +01:00
Andras Bacsai
850d57d0d2 fix text haha 2022-11-30 15:49:39 +01:00
Andras Bacsai
7981bec1ed text changes 2022-11-30 15:47:54 +01:00
Andras Bacsai
76373a8597 feat: save application data before deploying 2022-11-30 15:40:27 +01:00
Andras Bacsai
9913e7b70b feat: specific git commit deployment
feat: revert to specific image
fix: no system wide docker registries
2022-11-30 15:22:07 +01:00
Andras Bacsai
a08bb25bfa fix: static for arm 2022-11-30 11:45:39 +01:00
Andras Bacsai
28ec164bc2 fix: update PR/MRs with new previewSeparator 2022-11-30 11:36:05 +01:00
Andras Bacsai
3d5ea8629c fix: apache on arm 2022-11-30 11:18:19 +01:00
Andras Bacsai
4aaf59d034 update templates and tags 2022-11-30 11:07:44 +01:00
Andras Bacsai
14850476c7 feat: able to host static/php sites on arm 2022-11-30 11:00:03 +01:00
Andras Bacsai
bf5b6170fa remove console log 2022-11-29 15:47:25 +01:00
Andras Bacsai
6f91591448 fix: webhook previewseparator 2022-11-29 15:45:18 +01:00
Andras Bacsai
3c723bcba2 fix: remove sentry before migration 2022-11-29 15:13:05 +01:00
Andras Bacsai
e7dd13cffa fix: git checkout 2022-11-29 15:10:34 +01:00
Andras Bacsai
ad91630faa fix: remove beta from systemwide git 2022-11-29 15:05:31 +01:00
Andras Bacsai
57f746b584 fix: login error 2022-11-29 14:55:40 +01:00
Andras Bacsai
a55720091c fix: prevent webhook errors from being logged 2022-11-29 14:50:24 +01:00
Andras Bacsai
b461635834 debug 2022-11-29 14:44:53 +01:00
Andras Bacsai
1375580651 fix: migrations 2022-11-29 14:01:19 +01:00
Andras Bacsai
3d20433ad1 feat: sentry frontend 2022-11-29 13:59:03 +01:00
Andras Bacsai
58447c6456 update migration 2022-11-29 13:39:00 +01:00
Andras Bacsai
c6273e9177 feat: custom previewseparator 2022-11-29 13:29:11 +01:00
Andras Bacsai
ffdc158d44 fix: only visible with publicrepo 2022-11-29 13:13:04 +01:00
Andras Bacsai
876c81fad8 fix: ui 2022-11-29 13:00:44 +01:00
Andras Bacsai
028ee6d7b1 feat: deploy specific commit for apps
feat: keep number of images locally to revert quickly
2022-11-29 11:47:20 +01:00
Andras Bacsai
ec00548f1b feat: system wide git out of beta 2022-11-29 10:53:05 +01:00
Andras Bacsai
c4dc03e4a8 Merge pull request #700 from ThallesP/main
feature: initial support for specific git commit
2022-11-29 10:52:21 +01:00
Andras Bacsai
3a510a77ec Merge branch 'next' into main 2022-11-29 10:50:00 +01:00
Andras Bacsai
98a785fced tags 2022-11-29 10:36:19 +01:00
Andras Bacsai
c48654160d fixes 2022-11-29 10:35:56 +01:00
Andras Bacsai
55b80132c4 fixes 2022-11-29 09:43:28 +01:00
Andras Bacsai
1f0c168936 fixes 2022-11-29 09:42:36 +01:00
Andras Bacsai
6715bc750f Merge pull request #721 from gabrielengel/g-i18n
Starting translations work
2022-11-29 09:24:52 +01:00
Andras Bacsai
04a48a626b Merge pull request #746 from gabrielengel/refactor-servers
Componentization of /servers and /sources (depends on badges merge)
2022-11-29 09:22:08 +01:00
Andras Bacsai
2f9f0da7c6 Merge pull request #745 from gabrielengel/new-badges
New Badges components: destination, public, status, teams
2022-11-29 09:21:30 +01:00
Andras Bacsai
513c4f9e29 fixes 2022-11-29 09:19:10 +01:00
Andras Bacsai
3f078517a0 fix: dnt 2022-11-28 14:29:14 +01:00
Andras Bacsai
37036f0fca fix: sentry dsn update 2022-11-28 13:57:18 +01:00
Andras Bacsai
5789aadb5c feat: do not track in settings 2022-11-28 13:55:49 +01:00
Andras Bacsai
a768ed718a update sentry 2022-11-28 12:56:43 +01:00
Andras Bacsai
9c6092f31f fix: seed 2022-11-28 12:53:44 +01:00
Andras Bacsai
40d294a247 feat: add default sentry 2022-11-28 12:02:10 +01:00
Andras Bacsai
72844e4edc feat: save doNotTrackData to db 2022-11-28 11:48:38 +01:00
Andras Bacsai
db0a71125a version++ 2022-11-28 11:28:54 +01:00
Andras Bacsai
da244af39d fixes 2022-11-28 11:27:03 +01:00
Andras Bacsai
067f502d3c feat: custom docker compose file location in repo 2022-11-28 10:21:11 +01:00
Andras Bacsai
fffc6b1e4e feat: docker registries working 2022-11-25 15:44:11 +01:00
Andras Bacsai
9121c6a078 fix: 0 destinations redirect after creation 2022-11-25 15:43:59 +01:00
Andras Bacsai
9c4e581d8b feat: use registry for building 2022-11-25 14:29:01 +01:00
Andras Bacsai
dfadd31f46 Merge pull request #748 from zarxor/main
Typing error in CONTRIBUTION.md
2022-11-25 13:08:16 +01:00
Johan Boström
0cfa6fff43 Typing error in CONTRIBUTION.md 2022-11-23 21:00:01 +01:00
Andras Bacsai
d61671c1a0 wip 2022-11-23 15:44:30 +01:00
Andras Bacsai
d4f10a9af3 feat: custom/private docker registries 2022-11-23 14:39:30 +01:00
Andras Bacsai
03861af893 fix: nope in database strings 2022-11-23 13:40:10 +01:00
Andras Bacsai
ae531c445d fix: remove hardcoded sentry dsn 2022-11-23 13:39:16 +01:00
Andras Bacsai
4b26aeef9a fix: remote haproxy password/etc 2022-11-23 13:39:16 +01:00
Andras Bacsai
1e47b79b50 chore: version++ 2022-11-23 13:39:16 +01:00
Andras Bacsai
0c223dcec4 Merge pull request #698 from themarkwill/fix/errorInBaseApi
fix: Accept logged and not logged user in /base
2022-11-23 13:36:39 +01:00
Andras Bacsai
0f4536c3d3 Merge pull request #744 from coollabsio/next
v3.11.13
2022-11-23 13:08:13 +01:00
Andras Bacsai
f43c584463 prettify 2022-11-23 13:07:45 +01:00
Gabriel Engel
91c558ec83 Componentization of /servers and /sources 2022-11-23 08:17:03 -03:00
Gabriel Engel
9d45ab3246 New Badges components: destination, public, status, teams + container/status 2022-11-23 07:52:59 -03:00
Andras Bacsai
34ff6eb567 fix: load logs after build failed 2022-11-23 11:51:19 +01:00
Andras Bacsai
8793c00438 fix: mounts 2022-11-23 11:48:31 +01:00
Andras Bacsai
d7981d5c3e fix: logs 2022-11-23 11:48:04 +01:00
Andras Bacsai
bcaae3b67b debug off
fix: logging
2022-11-23 11:37:52 +01:00
Andras Bacsai
046d9f9597 debug 2022-11-23 11:24:15 +01:00
Andras Bacsai
81bd0301d2 fix: hasura admin secret 2022-11-23 11:18:25 +01:00
Andras Bacsai
530e7e494f fix: storage for compose bp + debug on 2022-11-23 10:57:52 +01:00
Andras Bacsai
d402fd5690 fix: move debug log settings to build logs 2022-11-23 10:28:36 +01:00
Andras Bacsai
eebec3b92f fix: escape % in secrets 2022-11-23 10:17:09 +01:00
Andras Bacsai
211c6585fa chore: version++ 2022-11-22 13:17:25 +01:00
Andras Bacsai
e1b5c40ca0 update templates 2022-11-22 13:17:09 +01:00
Andras Bacsai
747a9b521b fix: wrong icons on dashboard 2022-11-22 13:16:47 +01:00
Andras Bacsai
c2d72ad309 Merge pull request #742 from coollabsio/next
v3.11.12
2022-11-22 11:20:40 +01:00
Andras Bacsai
596181b622 update packages 2022-11-22 10:55:52 +01:00
Andras Bacsai
77c5270e1e chore: version++ 2022-11-22 10:47:21 +01:00
Andras Bacsai
a663c14df8 fix: exposed ports 2022-11-22 10:47:02 +01:00
Andras Bacsai
3bd9f00268 Merge pull request #741 from coollabsio/next
v3.11.11
2022-11-21 22:03:07 +01:00
Andras Bacsai
1aadda735d fix: webhook traefik 2022-11-21 21:58:07 +01:00
Andras Bacsai
12035208e2 fix: replace $$generate vars 2022-11-21 21:54:21 +01:00
Andras Bacsai
df8a9f673c fix: gh actions 2022-11-18 14:49:20 +01:00
Andras Bacsai
aa5c8a2c56 fix: gh actions 2022-11-18 14:48:31 +01:00
Andras Bacsai
a84540e6bb fix: gitea icon is svg 2022-11-18 14:47:23 +01:00
Andras Bacsai
fb91b64063 Merge pull request #730 from quiint/patch-1
Create Gitea icon
2022-11-18 14:45:01 +01:00
Andras Bacsai
94cc77ebca feat: only show expose if no proxy conf defined in template 2022-11-18 14:33:58 +01:00
Andras Bacsai
aac6981304 fix: no variables in template
feat: hostPort proxy conf from template
2022-11-18 14:28:05 +01:00
Andras Bacsai
ca05828b68 ga fixes 2022-11-18 11:21:41 +01:00
Andras Bacsai
8ec6b4c59c ga fixes 2022-11-18 11:19:15 +01:00
Andras Bacsai
f1be5f5341 ga fixes 2022-11-18 11:17:04 +01:00
Andras Bacsai
714c264002 fluentbit github release 2022-11-18 11:07:52 +01:00
Andras Bacsai
eca58097ef Merge pull request #733 from coollabsio/next
v3.11.10
2022-11-16 14:24:54 +01:00
Andras Bacsai
281146e22b chore: version++ 2022-11-16 12:46:29 +00:00
Andras Bacsai
f3a19a5d02 fix: wrong template/type 2022-11-16 12:40:44 +00:00
Andras Bacsai
9b9b6937f4 fix: local dev api/ws urls 2022-11-16 12:40:28 +00:00
Andras Bacsai
f54c0b7dff fix: isBot issue 2022-11-15 19:13:46 +00:00
Quiint
36c58ad286 Create gitea.svg 2022-11-14 09:54:46 -05:00
Andras Bacsai
a67f633259 Merge pull request #726 from coollabsio/next
v3.11.8
2022-11-14 14:24:52 +01:00
Andras Bacsai
f39a607c1a fix: default icon for new services 2022-11-14 13:54:06 +01:00
Andras Bacsai
0cc67ed2e5 update embedded templates 2022-11-14 13:46:17 +01:00
Andras Bacsai
5f8402c645 Merge pull request #727 from ksmithdev/main
Create keycloak.png
2022-11-14 12:59:29 +01:00
Andras Bacsai
3ab87cd11e ui: reload compose loading 2022-11-14 11:53:53 +01:00
Andras Bacsai
d5620d305d fix: ports for services 2022-11-14 11:49:32 +01:00
Andras Bacsai
35ebc5e842 fix: empty secrets on UI 2022-11-14 11:37:36 +01:00
Andras Bacsai
66276be1d2 fix: volume names for undefined volume names in compose 2022-11-14 11:26:12 +01:00
Andras Bacsai
47c0d522db chore: version++ 2022-11-14 11:00:25 +01:00
Andras Bacsai
b654883d1a ui: fixes 2022-11-14 10:59:19 +01:00
Andras Bacsai
b4f9d29129 fix: application persistent storage things 2022-11-14 10:40:28 +01:00
Andras Bacsai
bec6b961f3 fix: docker compose persistent volumes 2022-11-14 09:11:02 +01:00
Kyle Smith
2ce8f34306 Create keycloak.png 2022-11-11 14:03:05 -05:00
Andras Bacsai
30d1ae59ec revert: revert: revert 2022-11-11 14:25:02 +01:00
Andras Bacsai
ac7d4e3645 fix: getTemplates 2022-11-11 14:19:42 +01:00
Andras Bacsai
868c4001f6 gh action: revert 2022-11-11 14:17:53 +01:00
Andras Bacsai
e99c44d967 gh actions: update prod release flow 2022-11-11 13:41:02 +01:00
Andras Bacsai
48a877f160 Merge pull request #725 from coollabsio/next
v3.11.7
2022-11-11 13:33:57 +01:00
Andras Bacsai
cea894a8bd fix: dashboard error 2022-11-11 13:28:37 +01:00
Andras Bacsai
087e7b9311 Merge pull request #724 from coollabsio/next
v3.11.6
2022-11-11 11:58:46 +01:00
Andras Bacsai
39ba498293 ui: fix 2022-11-11 10:39:01 +01:00
Andras Bacsai
fe7390bd4d fix: update on mobile 2022-11-11 10:38:30 +01:00
Andras Bacsai
75af551435 ui: secrets on apps 2022-11-11 09:33:45 +01:00
Andras Bacsai
ae2d3ebb48 fix: no tags error 2022-11-11 09:25:02 +01:00
Andras Bacsai
5ff6c53715 Merge pull request #723 from coollabsio/next
v3.11.5
2022-11-11 08:28:37 +01:00
Andras Bacsai
3c94723b23 fix: show rollback button loading 2022-11-10 15:43:28 +01:00
Andras Bacsai
c6a2e3e328 update tags 2022-11-10 15:34:33 +01:00
Andras Bacsai
2dc5e10878 update tags 2022-11-10 15:33:57 +01:00
Andras Bacsai
4086dfcf56 rename lavalink 2022-11-10 15:32:13 +01:00
Andras Bacsai
7937c2bab0 Merge pull request #717 from kaname-png/next
chore: add jda icon for lavalink service
2022-11-10 15:31:37 +01:00
Andras Bacsai
5ffa8e9936 update templates 2022-11-10 15:29:44 +01:00
Andras Bacsai
c431cee517 fix: wp + mysql on arm 2022-11-10 15:01:03 +01:00
Andras Bacsai
375f17e728 debug 2022-11-10 14:52:37 +01:00
Andras Bacsai
d3f658c874 Readme fix 2022-11-10 14:17:20 +01:00
Andras Bacsai
5e340a4cdd fix: expose ports for services 2022-11-10 14:13:58 +01:00
Andras Bacsai
409a5b9f99 fix: n8n and weblate icon 2022-11-10 14:08:02 +01:00
Andras Bacsai
fba305020b fix: for rollback 2022-11-10 14:00:01 +01:00
Andras Bacsai
bd4ce3ac45 feat: rollback coolify 2022-11-10 13:57:34 +01:00
Gabriel Engel
733de60f7c Starting translations work 2022-11-09 19:27:03 -03:00
Andras Bacsai
c365a44e01 Merge pull request #719 from coollabsio/next
v3.11.4
2022-11-09 14:20:23 +01:00
Andras Bacsai
e94f450bf0 fix: doc links 2022-11-09 13:50:29 +01:00
Andras Bacsai
d5efc9ddde chore: version++ 2022-11-09 13:50:20 +01:00
Andras Bacsai
68895ba4a5 fix: variable replacements 2022-11-09 13:50:11 +01:00
Andras Bacsai
139aa7a0fc Merge pull request #718 from coollabsio/next
v3.11.3
2022-11-09 13:05:58 +01:00
Andras Bacsai
4955157e13 fix: compose webhooks fixed 2022-11-09 13:02:42 +01:00
Kaname
f2dd5cc75e chore: add jda icon for lavalink service 2022-11-08 12:39:41 -06:00
Andras Bacsai
2ad634dbc6 refactor: code 2022-11-08 15:51:07 +01:00
Andras Bacsai
de13f65a24 fix: umami template 2022-11-08 15:23:18 +01:00
Andras Bacsai
8994dde8f0 Merge pull request #715 from coollabsio/next
v3.11.2
2022-11-08 14:55:51 +01:00
Andras Bacsai
b7303a0828 fix: remove contribution docs 2022-11-08 14:44:54 +01:00
Andras Bacsai
5bc330162a Merge pull request #709 from gabrielengel/g-contribute
Organizing Contribution.md
2022-11-08 14:43:57 +01:00
Andras Bacsai
0ebc0217f3 fix: umami + ghost issues 2022-11-08 14:42:04 +01:00
Andras Bacsai
95c810b80a Merge pull request #714 from coollabsio/next
v3.11.2
2022-11-08 12:09:09 +01:00
Andras Bacsai
82d7fb883d fix: more simplified webhooks 2022-11-08 11:54:22 +01:00
Andras Bacsai
b96e710543 fix: remove ghost-mariadb from the list 2022-11-08 11:30:41 +01:00
Andras Bacsai
24e5e85225 revert staging release 2022-11-08 11:22:53 +01:00
Andras Bacsai
7b8f81f1b2 Merge pull request #713 from coollabsio/next
fixes
2022-11-08 11:21:41 +01:00
Andras Bacsai
62e60fc7ab fix: simplify webhooks 2022-11-08 11:15:56 +01:00
Andras Bacsai
ccd3d4aded fix: preview webhooks 2022-11-08 10:40:11 +01:00
Andras Bacsai
f0cf155b5c Merge pull request #712 from coollabsio/next
fix: migrate template
2022-11-08 10:33:20 +01:00
Andras Bacsai
b66f67d889 fix: migrate template 2022-11-08 10:19:02 +01:00
Andras Bacsai
e5dc07bde1 Merge pull request #711 from coollabsio/next
Quick fixes
2022-11-08 10:09:16 +01:00
Andras Bacsai
a955eb0fec fix: coolify instance proxy 2022-11-08 10:08:47 +01:00
Andras Bacsai
c070af9681 Merge pull request #710 from coollabsio/next
v3.11.1
2022-11-08 09:55:03 +01:00
Andras Bacsai
c15e060ef2 fix: appwrite webhook 2022-11-08 09:54:25 +01:00
Gabriel Engel
6d6f2454a7 Link Contribution on Readme 2022-11-07 19:02:59 -03:00
Gabriel Engel
9ff44ed46b Fix Typo 2022-11-07 18:58:54 -03:00
Gabriel Engel
adf3ef61b8 Link GettingStarted.md 2022-11-07 18:58:06 -03:00
Gabriel Engel
832107e0b8 Requirements 2022-11-07 18:56:14 -03:00
Gabriel Engel
0ea1e71808 Organizing contributing 2022-11-07 18:44:47 -03:00
Andras Bacsai
3b9b3f8ffa Merge pull request #708 from coollabsio/next
Fixes for v3.11.0
2022-11-07 15:21:38 +01:00
Andras Bacsai
fda8823050 fix: plausible analytics things 2022-11-07 14:59:39 +01:00
Andras Bacsai
b5756cb14f save templates 2022-11-07 14:10:37 +01:00
Andras Bacsai
617d3dbe52 fix: templates 2022-11-07 13:48:57 +01:00
Andras Bacsai
c18beb1c7c fix: templates 2022-11-07 13:40:18 +01:00
Andras Bacsai
a6957b919c fix: template 2022-11-07 13:38:55 +01:00
Andras Bacsai
816a362534 fix: confirm restart service 2022-11-07 13:35:45 +01:00
Andras Bacsai
7ce3ebde4e fix: templates 2022-11-07 13:30:58 +01:00
Andras Bacsai
cc2f83c4d9 Merge pull request #705 from coollabsio/next
v3.11.0
2022-11-07 12:02:32 +01:00
Andras Bacsai
6ce492049e fix 2022-11-07 11:52:34 +01:00
Andras Bacsai
a7999de4b0 fix: compose icon 2022-11-07 11:27:17 +01:00
Andras Bacsai
d4bdfabf19 chore: version++ 2022-11-07 11:03:16 +01:00
Andras Bacsai
85030ab804 fix: template files 2022-11-07 10:44:50 +01:00
Andras Bacsai
2a9bd00a50 fixes 2022-11-07 09:36:51 +01:00
Andras Bacsai
1c2d76e651 UI updates 2022-11-07 08:59:06 +01:00
Andras Bacsai
a97f7d225a fix: remove old minio proxies 2022-11-04 21:34:24 +01:00
Andras Bacsai
2781848aac cleanup 2022-11-04 21:15:08 +01:00
Andras Bacsai
d179da2bee fix 2022-11-04 21:11:38 +01:00
Andras Bacsai
60e7922734 stop minio proxy on restart 2022-11-04 21:07:19 +01:00
Andras Bacsai
1ece37ec3c update template 2022-11-04 15:20:31 +01:00
Andras Bacsai
8dad865146 contribution guide 2022-11-04 14:52:21 +01:00
Andras Bacsai
80d15e782b fixes 2022-11-04 14:41:52 +01:00
Andras Bacsai
d24e4c6518 fix icons 2022-11-04 14:24:22 +01:00
Andras Bacsai
6def46544c fix: wh catchall for all 2022-11-04 12:11:04 +01:00
Andras Bacsai
d66bae32d3 fix: preview wbh 2022-11-04 12:08:26 +01:00
Andras Bacsai
1b753a4020 fix: PR stops main deployment 2022-11-04 12:08:20 +01:00
Andras Bacsai
25e6a74a0a fix: wb for previews 2022-11-04 11:45:47 +01:00
Andras Bacsai
afde00a4be fix: websecure redirect 2022-11-04 11:44:04 +01:00
Andras Bacsai
0022d380bb fix: webhooks 2022-11-04 11:26:43 +01:00
Andras Bacsai
d80d2ab934 fix: previews wbh 2022-11-04 10:52:08 +01:00
Andras Bacsai
13c1734753 feat: redirect catch-all rule 2022-11-04 10:50:16 +01:00
Andras Bacsai
bf33d6c34e fix: remote webhooks 2022-11-04 09:58:37 +01:00
Andras Bacsai
1b02f9bd5d fix: webhook simplified 2022-11-04 09:54:13 +01:00
Andras Bacsai
9f63c645ff fix: load public repos 2022-11-04 09:53:57 +01:00
Andras Bacsai
abf271fb68 fixes 2022-11-03 15:54:39 +01:00
Andras Bacsai
363755c3bf fix doclinks 2022-11-03 15:37:39 +01:00
Andras Bacsai
ba2db666aa updates 2022-11-03 15:17:31 +01:00
Andras Bacsai
9f3677b694 updates 2022-11-03 14:59:37 +01:00
Andras Bacsai
e6024c997f debug more 2022-11-03 14:41:58 +01:00
Andras Bacsai
3cb83e2286 debug 2022-11-03 14:28:53 +01:00
Andras Bacsai
780d03e5e1 fixes 2022-11-03 14:16:51 +01:00
Andras Bacsai
214114e6ce fixes 2022-11-03 14:09:06 +01:00
Andras Bacsai
274d3fe679 fix wh again 2022-11-03 13:50:04 +01:00
Andras Bacsai
0ecf86d8a3 remove debug 2022-11-03 13:47:49 +01:00
Andras Bacsai
c2d4390a72 fix wh 2022-11-03 13:46:51 +01:00
Andras Bacsai
7627d59d43 debug 2022-11-03 13:37:48 +01:00
Andras Bacsai
71ce9a6b37 fix: pathprefix 2022-11-03 13:28:58 +01:00
Andras Bacsai
232018c925 fix 2022-11-03 11:40:55 +01:00
Andras Bacsai
9dfbbe58ff fix: toast, rde, webhooks 2022-11-03 11:32:18 +01:00
Andras Bacsai
fa9738a2e0 fix: tooltip 2022-11-03 10:13:36 +01:00
Andras Bacsai
94ecbc5921 fix: app logs view 2022-11-03 09:43:41 +01:00
Andras Bacsai
3c68d317d7 fix: traefik proxy q 10s 2022-11-03 09:43:34 +01:00
Andras Bacsai
56d4edfb9d fix: toast 2022-11-03 09:43:23 +01:00
Andras Bacsai
c6c037ff17 fixes 2022-11-03 09:31:01 +01:00
Andras Bacsai
44feba4d89 fix branches 2022-11-03 08:14:57 +01:00
Andras Bacsai
962f2c7380 Merge pull request #682 from Huskehhh/main
fix: expose port is readonly on the wrong condition
2022-11-02 22:49:43 +01:00
Andras Bacsai
22007426aa fix migration 2022-11-02 22:44:46 +01:00
Andras Bacsai
008e9a92d3 fixes 2022-11-02 22:32:09 +01:00
Andras Bacsai
41139ee2ab fixes 2022-11-02 22:11:32 +01:00
Andras Bacsai
845c40d23c fixes 2022-11-02 21:31:16 +01:00
Andras Bacsai
a22f26c4c8 fixes 2022-11-02 16:03:27 +01:00
Andras Bacsai
99ff020f56 fix 2022-11-02 15:37:51 +01:00
Andras Bacsai
f863b42b71 fix: heroku bp 2022-11-02 15:36:23 +01:00
Andras Bacsai
2e713b459e fixes 2022-11-02 15:19:20 +01:00
Andras Bacsai
923241ce1e fixes 2022-11-02 10:08:22 +01:00
Andras Bacsai
3a8929b9d7 fix 2022-11-02 09:58:14 +01:00
Andras Bacsai
eb92d39d40 replace ws with socketio 2022-11-02 09:49:21 +01:00
Andras Bacsai
bdc62a007e fixes 2022-10-28 20:30:34 +00:00
Andras Bacsai
4b35db6291 wss 2022-10-28 14:51:22 +00:00
Andras Bacsai
c8282b215d use window location for ws in prod 2022-10-28 14:29:21 +00:00
Andras Bacsai
c123669828 test ws 2022-10-28 15:50:57 +02:00
Andras Bacsai
781fd0a1cd fixes 2022-10-28 12:03:07 +02:00
Andras Bacsai
9bd99605fb stop wp ftp on service stop 2022-10-28 12:02:22 +02:00
Andras Bacsai
dc626bd4f0 fixes 2022-10-28 11:54:03 +02:00
Andras Bacsai
aa27aeafa1 fix fqdn check 2022-10-28 09:15:03 +02:00
Andras Bacsai
cdb25cd0e9 remove console.log 2022-10-27 20:06:21 +00:00
Andras Bacsai
dc2d15fd9c fix 2022-10-27 20:05:02 +00:00
Andras Bacsai
55cb788380 fixes 2022-10-27 19:21:33 +00:00
Andras Bacsai
0f3b7fe643 fixes 2022-10-27 18:55:21 +00:00
Andras Bacsai
4b812350a8 fix migrations 2022-10-27 13:37:45 +02:00
Andras Bacsai
aec37164de debug 2022-10-27 13:18:24 +02:00
Andras Bacsai
dec02bd8db update pnpm lock 2022-10-27 12:06:22 +02:00
Andras Bacsai
1bd6a8ed9e update dockerfile 2022-10-27 12:06:12 +02:00
Andras Bacsai
2030f714fa cleanup stuffs 2022-10-27 09:55:32 +02:00
Andras Bacsai
4416646954 package updates + tags selector 2022-10-26 15:50:10 +02:00
Andras Bacsai
52ba9dc02a Update devTemplates.yaml 2022-10-26 15:44:29 +02:00
Andras Bacsai
dad3d42d14 fixes 2022-10-26 14:12:27 +02:00
Andras Bacsai
0d12f3043b fix + cleanup 2022-10-26 13:44:32 +02:00
Andras Bacsai
1225786fc0 cleanup + fixes 2022-10-26 11:21:55 +02:00
Andras Bacsai
71496d5229 cleanup + arm support 2022-10-26 10:49:30 +02:00
Andras Bacsai
eb0aa20fe1 fixes 2022-10-26 10:27:33 +02:00
Andras Bacsai
c34de3d0a3 fixes 2022-10-26 10:12:17 +02:00
Andras Bacsai
54e0a9fc28 fix 2022-10-26 09:35:56 +02:00
Andras Bacsai
4bcd034b3d fixes 2022-10-26 09:27:43 +02:00
Andras Bacsai
111bd29cc8 updates 2022-10-25 22:43:31 +02:00
ThallesP
e038865693 feature: add default to latest commit and support for gitlab 2022-10-25 13:51:10 -03:00
ThallesP
dfd29dc37a feature: initial support for specific git commit 2022-10-25 13:26:03 -03:00
Andras Bacsai
b0fcd23ca6 template update 2022-10-25 15:59:04 +02:00
Andras Bacsai
f80b1d31f5 fixes, dev templates, etc 2022-10-25 15:12:40 +02:00
Andras Bacsai
811ea5b92a fixes 2022-10-24 22:54:19 +02:00
Andras Bacsai
f9dfbd5800 fix 2022-10-24 21:54:33 +02:00
Andras Bacsai
88f1c36929 fixes and remote templates 2022-10-24 14:23:34 +02:00
Andras Bacsai
8bbe771f5b fake service in dev 2022-10-24 13:12:31 +02:00
Andras Bacsai
c578fa63e5 updates 2022-10-24 13:10:05 +02:00
The Mark
4448b86b93 fix: Accept logged and not logged user in /base 2022-10-23 13:31:24 -04:00
Andras Bacsai
17badf95dc fix 2022-10-21 22:34:27 +02:00
Andras Bacsai
a267ee40d2 asd 2022-10-21 22:15:50 +02:00
Andras Bacsai
8ef645b3c2 fix 2022-10-21 22:13:29 +02:00
Andras Bacsai
35625b22f5 hm 2022-10-21 22:05:05 +02:00
Andras Bacsai
221dcefd6c fix 2022-10-21 21:24:52 +02:00
Andras Bacsai
9c74a9c1db fixes 2022-10-21 21:19:30 +02:00
Andras Bacsai
55fc3920fc updates 2022-10-21 20:54:33 +02:00
Andras Bacsai
5d60b5eb8b saving things 2022-10-21 15:51:32 +02:00
Andras Bacsai
049d5166e8 add template.json 2022-10-21 11:30:16 +02:00
Andras Bacsai
f4019db3d1 fixes 2022-10-20 16:06:33 +02:00
Andras Bacsai
9f3732d35b fix: service logs 2022-10-20 10:42:47 +02:00
Andras Bacsai
b4f17ac3c6 add weblate 2022-10-20 10:03:23 +02:00
Andras Bacsai
978e35d335 add searxng 2022-10-20 09:44:08 +02:00
Andras Bacsai
22cbbec960 add searxng 2022-10-20 09:18:13 +02:00
Andras Bacsai
21f3a70788 add glitchtip 2022-10-19 14:59:38 +02:00
Andras Bacsai
b4c6f80e1c add hasura 2022-10-19 14:15:48 +02:00
Andras Bacsai
e1198c42eb add hasura 2022-10-19 13:14:39 +02:00
Andras Bacsai
e09fdbcef0 add umami + hashedpws 2022-10-19 12:00:43 +02:00
Andras Bacsai
b708e79929 add umami 2022-10-19 11:26:27 +02:00
Andras Bacsai
cbaecff3b7 meilisearch 2022-10-19 10:55:16 +02:00
Andras Bacsai
4f7d2630af meilisearch 2022-10-19 10:29:08 +02:00
Andras Bacsai
92d3860240 ghost 2022-10-19 10:24:53 +02:00
Andras Bacsai
3757d5da9f ghost 2022-10-19 10:07:04 +02:00
Andras Bacsai
1d38a885bb add wordpress 2022-10-18 15:17:59 +02:00
Andras Bacsai
dbd767e8f1 wordpress 2022-10-18 15:01:18 +02:00
Andras Bacsai
8b83c38127 vscode 2022-10-18 14:45:30 +02:00
Andras Bacsai
f1ea01e709 vscodeserver + minio 2022-10-18 14:34:10 +02:00
Andras Bacsai
12a1aeb0f8 add minio 2022-10-18 14:12:33 +02:00
Andras Bacsai
413150012f fix 2022-10-18 13:57:48 +02:00
Andras Bacsai
8ef5604ce8 updates 2022-10-18 13:52:47 +02:00
Andras Bacsai
42e50c800b fix 2022-10-18 12:51:53 +02:00
Andras Bacsai
8fbd08003c fixes 2022-10-18 12:43:35 +02:00
Andras Bacsai
877577efdb plausible migration done 2022-10-18 12:02:09 +02:00
Andras Bacsai
a6f457749b lots of changes 2022-10-18 11:32:38 +02:00
Andras Bacsai
9afb713df1 add length option to template 2022-10-17 14:55:28 +00:00
Andras Bacsai
8f660c0276 work-work 2022-10-17 15:43:57 +02:00
Andras Bacsai
a7e86d9afd fix 2022-10-14 15:54:19 +02:00
Andras Bacsai
462eea90c0 tons of updates 2022-10-14 15:48:37 +02:00
Andras Bacsai
79c30dfc91 remove nix file 2022-10-14 09:45:45 +02:00
Jordyn
410a78b366 fix: expose port is readonly on the wrong condition 2022-10-14 10:49:53 +11:00
Andras Bacsai
065807a0bc fix: secret errors 2022-10-13 15:43:57 +02:00
Andras Bacsai
1d93658e56 Merge pull request #680 from coollabsio/next
v3.10.16
2022-10-12 22:05:39 +02:00
Andras Bacsai
2b7865e6ea update packages 2022-10-12 19:57:32 +00:00
Andras Bacsai
0cdba8c329 fix: single container logs and usage with compose 2022-10-12 19:53:21 +00:00
Andras Bacsai
11b317b788 ui: new resource label 2022-10-12 15:10:00 +02:00
Andras Bacsai
fb955e15f4 Merge pull request #656 from coollabsio/next
v3.10.15
2022-10-12 14:51:56 +02:00
Andras Bacsai
ae2d141f0d fix: pull does not work remotely on huge compose file 2022-10-12 14:27:13 +02:00
Andras Bacsai
68c983923e fix: dockerfile 2022-10-12 14:08:00 +02:00
Andras Bacsai
bf252f7f20 fix: appwrite v1 missing containers 2022-10-12 13:50:28 +02:00
Andras Bacsai
324038486f fixes 2022-10-12 12:02:47 +02:00
Andras Bacsai
bef5da49cf remove text 2022-10-12 11:43:47 +02:00
Andras Bacsai
24e7f547fa feat: monitoring by container 2022-10-12 11:42:45 +02:00
Andras Bacsai
3ee3ab0ad1 fix: port required if fqdn is set 2022-10-12 11:27:13 +02:00
Andras Bacsai
f734154da8 fix: check compose domains in general 2022-10-12 11:17:02 +02:00
Andras Bacsai
7a053ce697 fix: gitlab auth and compose reload 2022-10-12 11:11:18 +02:00
Andras Bacsai
25f250310e fix: dev container 2022-10-12 10:18:28 +02:00
Andras Bacsai
4eca05bbba fix: gh release 2022-10-12 10:07:18 +02:00
Andras Bacsai
45b0f791bb ci: update staging release 2022-10-11 10:54:22 +02:00
Andras Bacsai
42415a81c1 fix: update docker binaries 2022-10-11 10:54:13 +02:00
Andras Bacsai
6882e83d1e fix: logs for not running containers 2022-10-10 15:28:46 +02:00
Andras Bacsai
d4b7318413 fix: smart search for new services 2022-10-10 15:28:36 +02:00
Andras Bacsai
a2b4d400af fix: add git sha to build args 2022-10-10 15:28:16 +02:00
Andras Bacsai
f07868d24e fix: do not show nope as ip address for dbs 2022-10-10 15:28:02 +02:00
Andras Bacsai
d46ee049f4 Merge pull request #668 from bstst/bugfix/fix-deno-run-options
Fix deno options string
2022-10-10 15:26:00 +02:00
Andras Bacsai
c62eda5627 Merge pull request #666 from cmer/cmer-inject-git-sha
Inject GIT SHA into build
2022-10-10 15:17:18 +02:00
Martin Saulis
7683164ed2 fix deno options string 2022-10-07 14:59:16 +03:00
Carl Mercier
7090c16575 Update common.ts 2022-10-06 15:42:57 -04:00
Carl Mercier
9e634fed13 Inject GIT SHA into build process
As discussed at https://github.com/docker/hub-feedback/issues/600
2022-10-06 15:38:15 -04:00
Andras Bacsai
9bb125cebd feat: docker compose 2022-10-06 15:51:08 +02:00
Andras Bacsai
0c4850b91d fixes 2022-10-06 14:24:28 +02:00
Andras Bacsai
0eb7688c4d fixes 2022-10-06 14:15:05 +02:00
Andras Bacsai
f47cdb68d9 fixes 2022-10-06 12:01:24 +02:00
Andras Bacsai
d3c3cded37 debug: one less worker thread 2022-10-06 11:44:36 +02:00
Andras Bacsai
e91ea4ecbe feat: docker compose 2022-10-06 11:37:47 +02:00
Andras Bacsai
680b20d199 update dev container flow 2022-10-06 11:37:42 +02:00
Andras Bacsai
ec97e04fd4 revert last debug 2022-10-06 11:37:28 +02:00
Andras Bacsai
b0b2657fe0 debug: remove worker jobs 2022-10-06 10:26:40 +02:00
Andras Bacsai
d27426fd8f feat: docker compose support 2022-10-06 10:25:41 +02:00
Andras Bacsai
d8206c0e3e wip: docker compose 2022-10-05 15:34:52 +02:00
Andras Bacsai
3f1841a188 init: docker-compose support 2022-10-05 10:27:12 +00:00
Andras Bacsai
cb478e0dc8 add contribution guide on container based development flow 2022-10-05 09:13:51 +00:00
Andras Bacsai
02c42a7e3a fix: pure docker based development 2022-10-05 09:01:17 +00:00
Andras Bacsai
ef40f7349e Merge pull request #653 from coollabsio/next
v3.10.14
2022-10-05 09:24:39 +02:00
Andras Bacsai
86eebb35cb fix: do not use npx 2022-10-05 08:58:14 +02:00
Andras Bacsai
a901388887 revert 2022-10-04 23:06:46 +02:00
Andras Bacsai
6cd1c5de38 Dockerfile update 2022-10-04 22:22:29 +02:00
Andras Bacsai
7489f172a1 test prestart 2022-10-04 22:14:16 +02:00
Andras Bacsai
702798c275 revert things 2022-10-04 22:00:50 +02:00
Andras Bacsai
430d51866c test: remove prisma 2022-10-04 21:46:23 +02:00
Andras Bacsai
9d08421f01 dev intervals 2022-10-04 21:46:01 +02:00
Andras Bacsai
f4051874b2 Merge pull request #654 from vvvctr/patch-1
Proper capitalization for WordPress service type.
2022-10-04 21:14:52 +02:00
vvvctr
bbe0690056 Proper capitalization for WordPress service type. 2022-10-04 16:27:56 +02:00
Andras Bacsai
772c0d1e41 fix: nope if you are not logged in 2022-10-04 15:14:52 +02:00
Andras Bacsai
8eb9ca0260 fix: add buildkit features 2022-10-04 15:06:09 +02:00
Andras Bacsai
bd27afe0da fix: verify and configure remote docker engines 2022-10-04 15:01:47 +02:00
Andras Bacsai
a3af21275a fix: meilisearch data dir 2022-10-04 14:05:11 +02:00
Andras Bacsai
61eb155d13 chore: version++ 2022-10-04 14:05:01 +02:00
Andras Bacsai
7932c1c4a9 Merge pull request #648 from coollabsio/next
v3.10.13
2022-10-03 12:56:59 +02:00
Andras Bacsai
f776fb83e7 ui: settings icon 2022-10-03 11:45:24 +02:00
Andras Bacsai
a97521aba2 webhook: send 200 for ping and installation wh 2022-10-03 11:42:07 +02:00
Andras Bacsai
d1c0fe503e fix: remove unnecessary things 2022-10-03 11:32:15 +02:00
Andras Bacsai
ed02c1ae36 ui: iam & settings update 2022-10-03 11:31:50 +02:00
Andras Bacsai
9a67cf7355 fix: fork pr previews 2022-10-03 09:48:47 +02:00
Andras Bacsai
755eeda364 remove inspector 2022-10-03 09:25:31 +02:00
Andras Bacsai
136dee7747 ui: fix indicator 2022-10-03 09:20:57 +02:00
Andras Bacsai
e4e8428855 minify api 2022-10-02 11:08:04 +00:00
Andras Bacsai
de8dc021f9 fix: pr branches 2022-10-02 09:37:08 +00:00
Andras Bacsai
991587f252 fix: typo 2022-10-02 09:24:43 +00:00
Andras Bacsai
8dbcf257c4 fix: handle forked repositories 2022-10-02 09:16:51 +00:00
Andras Bacsai
0b067364a9 fix: default 0 pending invitations 2022-10-02 08:55:36 +00:00
Andras Bacsai
5367bd6134 show webhook details 2022-10-02 08:48:56 +00:00
Andras Bacsai
92228c4379 schema migration 2022-10-02 08:43:45 +00:00
Andras Bacsai
fb2c7896b3 update packages 2022-10-02 08:43:36 +00:00
Andras Bacsai
23265d9091 revert last changes 2022-10-02 10:38:08 +02:00
Andras Bacsai
2c9bb0e767 disable stuff 2022-10-01 13:58:50 +00:00
Andras Bacsai
f9e8400d83 temporary disable schedulers 2022-10-01 13:46:52 +00:00
Andras Bacsai
927a13cd76 temporary enable inspector 2022-10-01 13:03:55 +00:00
Andras Bacsai
51b3293e69 ui: inprogress version of iam 2022-09-29 15:46:52 +02:00
Andras Bacsai
3f76cadea9 fix: cleanup stucked tcp proxies 2022-09-29 14:44:20 +02:00
Andras Bacsai
6dbf53b558 chore: version++ 2022-09-29 14:32:55 +02:00
Andras Bacsai
22e937c798 fix: do not start tcp proxy without main container 2022-09-29 14:32:35 +02:00
Andras Bacsai
ac5cc8b299 Merge pull request #643 from coollabsio/next
v3.10.12
2022-09-29 14:09:37 +02:00
Andras Bacsai
c588ab723b fix: show logs better 2022-09-29 13:57:52 +02:00
Andras Bacsai
4b2dfc051d typo 2022-09-29 13:47:15 +02:00
Andras Bacsai
5238c83f3f fix: initial deploy status 2022-09-29 13:23:38 +02:00
Andras Bacsai
90bb580e50 ui: fixes 2022-09-29 13:23:29 +02:00
Andras Bacsai
f40e142704 ui: fix 2022-09-29 13:15:19 +02:00
Andras Bacsai
a67618675d fix: default buildImage and baseBuildImage 2022-09-29 13:15:16 +02:00
Andras Bacsai
4fe436e4d1 fix: dashboard statuses 2022-09-29 13:02:10 +02:00
Andras Bacsai
683b8c966f feat: cleanup unconfigured services and databases 2022-09-28 15:41:20 +02:00
Andras Bacsai
28377a156d feat: cleanup unconfigured applications 2022-09-28 11:45:02 +02:00
Andras Bacsai
3dcc4faabb Merge pull request #642 from coollabsio/next
v3.10.11
2022-09-28 11:18:01 +02:00
Andras Bacsai
60a033f93a ui: fix 2022-09-28 11:16:35 +02:00
Andras Bacsai
436bd73786 fix: baseDirectory 2022-09-28 11:14:23 +02:00
Andras Bacsai
5c69ff3339 fix: do not get status of more than 10 resources defined by category 2022-09-28 10:59:58 +02:00
Andras Bacsai
2105b1e7c4 ux: hasura console notification 2022-09-28 10:55:08 +02:00
Andras Bacsai
523004e5b2 chore: version++ 2022-09-28 10:54:57 +02:00
Andras Bacsai
5e02c386ec Merge pull request #641 from coollabsio/next
v3.10.10
2022-09-28 10:49:19 +02:00
Andras Bacsai
b4501fe52d ui: beta flag 2022-09-28 10:41:32 +02:00
Andras Bacsai
3c29eaa1b1 ui: small fix 2022-09-28 10:35:47 +02:00
Andras Bacsai
ee67e163b1 feat: system-wide github apps 2022-09-28 10:34:27 +02:00
Andras Bacsai
9662bc29fb ui: fix gitlab importer view 2022-09-28 09:56:27 +02:00
Andras Bacsai
96f2660b98 ui: loading button 2022-09-28 09:47:05 +02:00
Andras Bacsai
20f594c66c chore: version++ 2022-09-28 09:30:57 +02:00
Andras Bacsai
2b8d59dca3 Merge pull request #637 from coollabsio/next
v3.10.9
2022-09-28 09:29:31 +02:00
Andras Bacsai
d44047d109 ui: dev logs 2022-09-28 09:19:51 +02:00
Andras Bacsai
57c4d33bd3 ui: main resource search 2022-09-28 09:07:59 +02:00
Andras Bacsai
7a5377efe0 ui: resource button fix 2022-09-28 09:07:46 +02:00
Andras Bacsai
91e7cffccc fix: only log things to console in dev mode 2022-09-28 08:39:59 +02:00
Andras Bacsai
df31e47313 fix: disable development low disk space 2022-09-28 08:39:33 +02:00
Andras Bacsai
cb9586270c fix: able to delete apps in unconfigured state 2022-09-27 09:27:28 +00:00
Andras Bacsai
21dfa5227c fix: logs in docker bp 2022-09-26 20:37:58 +00:00
Andras Bacsai
9d15d2be77 chore: version++ 2022-09-26 20:22:08 +00:00
Andras Bacsai
929c02d31f ui: fix basedirectory meaning 2022-09-26 20:21:41 +00:00
Andras Bacsai
846185dd42 Merge pull request #635 from coollabsio/next
v3.10.8
2022-09-26 21:36:54 +02:00
Andras Bacsai
7bc2299a8e rename grafana dashboard to grafana 2022-09-26 19:24:13 +00:00
Andras Bacsai
d40e131bd8 fix: appwrite function network is not the default 2022-09-26 19:20:41 +00:00
Andras Bacsai
552c7297bf ui: service fixes 2022-09-26 19:00:36 +00:00
Andras Bacsai
3f5fd23955 ui: fix button 2022-09-26 18:57:24 +00:00
Andras Bacsai
8b8566251e chore: version++ 2022-09-26 18:49:05 +00:00
Andras Bacsai
6db47def8e fix: service logs 2022-09-26 18:48:22 +00:00
Andras Bacsai
1d0edc7b25 Merge pull request #634 from coollabsio/next
v3.10.7
2022-09-26 15:43:27 +02:00
Andras Bacsai
f9a417638a fix: seed 2022-09-26 13:42:19 +00:00
Andras Bacsai
984fe01551 Merge pull request #632 from coollabsio/next
v3.10.6
2022-09-26 13:43:49 +02:00
Andras Bacsai
d0cb350687 fix: error notification 2022-09-26 13:37:06 +02:00
Andras Bacsai
5f51011ce1 fix: empty preview value 2022-09-26 13:36:54 +02:00
Andras Bacsai
9ca125ac55 fix: error notification 2022-09-26 13:36:47 +02:00
Andras Bacsai
360f4f8c27 fix: seed new preview secret types 2022-09-26 13:36:15 +02:00
Andras Bacsai
6501f71bd6 chore: version++ 2022-09-26 13:24:02 +02:00
Andras Bacsai
bf6b799dba Merge pull request #625 from coollabsio/next
v3.10.5
2022-09-26 11:23:01 +02:00
Andras Bacsai
5f57279283 fix: multiplex ssh and ssl copy 2022-09-26 11:15:14 +02:00
Andras Bacsai
5ed3565520 ui: Beta features 2022-09-26 10:31:52 +02:00
Andras Bacsai
513fa90b8a ui: fixes 2022-09-26 10:27:51 +02:00
Andras Bacsai
a4d9b9689b fix: laravel php chooser 2022-09-26 10:27:42 +02:00
Andras Bacsai
1c05c0dcbb ui: fix 2022-09-26 10:21:01 +02:00
Andras Bacsai
a1b49a3a6b fix: base directory & docker bp 2022-09-26 10:20:53 +02:00
Andras Bacsai
6f57298cbb fix: scp without host verification & cert copy 2022-09-26 09:52:04 +02:00
Andras Bacsai
d8ce673088 fix: debug log for bp 2022-09-25 08:00:53 +00:00
Andras Bacsai
4cd7af7a74 fix: stream logs for heroku bp 2022-09-25 07:58:52 +00:00
Andras Bacsai
49c61b5992 fix: allow basedirectory for heroku 2022-09-25 07:48:03 +00:00
Andras Bacsai
e44d0550d2 fix: basedirectory should be empty if null 2022-09-25 07:47:54 +00:00
Andras Bacsai
17f82109b6 fix: consider base directory in heroku bp 2022-09-25 07:47:37 +00:00
Andras Bacsai
2d8888ae9b ui: fixes 2022-09-23 20:01:30 +00:00
Andras Bacsai
4abe9c6fb2 feat: ssl certificate sets custom ssl for applications 2022-09-23 15:21:19 +02:00
Andras Bacsai
f9d94fa660 ui: fixes 2022-09-23 14:20:37 +02:00
Andras Bacsai
eaa13f4990 remove self-signed certs 2022-09-23 14:10:17 +02:00
Andras Bacsai
01fd5901fe ui: fixes
fix: secret saving process
2022-09-23 14:09:26 +02:00
Andras Bacsai
3d6adeffc4 ui: more UI improvements 2022-09-22 15:48:16 +02:00
Andras Bacsai
9066952759 ui: redesign applications & settings
fix: follow logs
2022-09-22 12:30:28 +02:00
Andras Bacsai
6dd7f6274a fix: error during saving logs 2022-09-22 12:30:04 +02:00
Andras Bacsai
7a8fe6d152 ui: settings view 2022-09-22 09:47:33 +02:00
Andras Bacsai
be507be3a9 feat: refresh resource status on dashboard 2022-09-22 09:47:25 +02:00
Andras Bacsai
657b97f190 ui: fix destination view 2022-09-22 09:09:31 +02:00
Andras Bacsai
9d7745cd9b fix: settings db requests 2022-09-22 09:04:44 +02:00
Andras Bacsai
3668f83693 fix: not found redirect 2022-09-22 09:04:32 +02:00
Andras Bacsai
a2d5d99c1f fix: able to search with id 2022-09-22 09:03:34 +02:00
Andras Bacsai
f379ef6a3b feat: ssl cert on traefik config 2022-09-22 09:03:19 +02:00
Andras Bacsai
510a748749 fix: multiplex ssh connections 2022-09-22 09:02:53 +02:00
Andras Bacsai
550150d685 fix: db migration 2022-09-22 09:02:39 +02:00
Andras Bacsai
011ea9659e fix: ssl certificate distribution 2022-09-22 09:02:27 +02:00
Andras Bacsai
6eca7d948e add migration 2022-09-21 15:48:49 +02:00
Andras Bacsai
90e639f119 feat: custom certificate 2022-09-21 15:48:32 +02:00
Andras Bacsai
86ac6461d1 fix: dropdown 2022-09-20 13:33:35 +00:00
Andras Bacsai
18a95bf9ab ui: improvements 2022-09-20 15:06:33 +02:00
Andras Bacsai
7949bbe66d Merge branch 'temp' into next 2022-09-20 14:58:25 +02:00
Andras Bacsai
4b603c452a Merge pull request #602 from c0ldfront/trilium-notes-service
add trilium-notes-service
2022-09-20 14:54:53 +02:00
Andras Bacsai
837f0634b6 Merge pull request #606 from c0ldfront/grafana-service
add grafana-dashboard-service
2022-09-20 14:51:06 +02:00
Andras Bacsai
78076f7854 Merge branch 'next' into grafana-service 2022-09-20 14:50:37 +02:00
Andras Bacsai
719350cee1 Merge pull request #614 from c0ldfront/minio-login-issue
fix: MinIO invalid login
2022-09-20 14:49:41 +02:00
Andras Bacsai
4f6be3e6f5 revert: show usage everytime 2022-09-20 14:37:29 +02:00
Andras Bacsai
8e61e9fecb feat: Add migration button to appwrite 2022-09-20 14:36:06 +02:00
Andras Bacsai
2083285d78 version correction 2022-09-20 14:34:54 +02:00
Andras Bacsai
034e86e2cb ui: small logs on mobile 2022-09-20 13:07:49 +02:00
Andras Bacsai
f4a2d5c652 ui: dropdown as infobox 2022-09-19 14:01:48 +00:00
Andras Bacsai
534ccd6bf6 ui: fix git icon 2022-09-19 13:52:41 +00:00
Andras Bacsai
c17064f853 ui: fixes 2022-09-19 14:05:25 +02:00
Andras Bacsai
1e1566082f fix: tooltip 2022-09-19 12:14:14 +02:00
Andras Bacsai
449548654d Merge branch 'ui' into next 2022-09-19 12:06:00 +02:00
Andras Bacsai
6fc99524f0 ui: responsive! 2022-09-19 12:05:47 +02:00
Andras Bacsai
051629fad3 fix: ui 2022-09-19 08:57:55 +02:00
Andras Bacsai
f957008c1c Merge branch 'main' into ui 2022-09-19 08:57:48 +02:00
Andras Bacsai
98e1deec88 Merge pull request #612 from kaname-png/some-tweaks
feat(routes): ui for mobile and fixes
2022-09-19 08:54:23 +02:00
Andras Bacsai
99127652af fix: undead endpoint does not require JWT 2022-09-19 08:53:36 +02:00
Andras Bacsai
e9b9e9e82c fix: Appwrite default version 1.0 2022-09-19 08:53:22 +02:00
Andras Bacsai
2ed5c3746e chore: version++ 2022-09-19 08:53:11 +02:00
Kaname
2eda24799b Merge branch 'ui' into some-tweaks 2022-09-15 10:52:31 -06:00
Kaname
2c4bfab01a chore: whoops 2022-09-11 17:56:12 -06:00
Kaname
e689be552b Merge branch 'next' into some-tweaks 2022-09-11 17:54:04 -06:00
Andras Bacsai
e7038961ef Merge branch 'next' into some-tweaks 2022-09-11 13:56:07 +02:00
David Mydlarz
d5ece58f71 MINIO_SERVER_URL -> apiFqdn 2022-09-11 09:07:31 +09:00
Kaname
d7bbb5c4b7 chore: minor changes 2022-09-10 19:14:41 +00:00
Kaname
cf9c991c79 chore: minor changes 2022-09-10 19:03:09 +00:00
Kaname
0f0d96195d fix(routes): ui from secrets table 2022-09-10 19:00:43 +00:00
Kaname
3a562bb714 feat(routes): improve ui for apps, databases and services logs 2022-09-10 17:45:47 +00:00
Kaname
6381ba8478 fix(routes): header of settings page in databases 2022-09-10 17:27:48 +00:00
Kaname
9e3c14841a fix: ui with headers 2022-09-10 17:23:55 +00:00
Kaname
1917091338 Merge remote-tracking branch 'origin' into some-tweaks 2022-09-10 17:15:18 +00:00
Kaname
b1bb508554 Merge branch 'next' into some-tweaks 2022-09-10 17:14:44 +00:00
Kaname
4040b334f5 fix(routes): more ui tweaks 2022-09-10 01:54:59 +00:00
Kaname
d7e72519ef fix(routes): more ui tweaks 2022-09-10 01:30:07 +00:00
Kaname
c7752f0be9 fix(routes): more ui tweaks 2022-09-10 01:27:48 +00:00
Kaname
0ffe28a733 fix(routes): more ui tweaks 2022-09-10 01:23:17 +00:00
Kaname
56f24fe317 feat(styles): make header css component 2022-09-10 01:13:44 +00:00
Kaname
341cde2781 Merge branch 'next' into some-tweaks 2022-09-10 01:07:11 +00:00
Kaname
33bb8d434d feat(ui): improve header of pages 2022-09-10 00:16:49 +00:00
Kaname
9f813b7385 fix: github conflicts 2022-09-10 00:05:19 +00:00
Kaname
02a336a25d Merge remote-tracking branch 'origin' into some-tweaks 2022-09-09 23:57:03 +00:00
David Mydlarz
7df532fa72 Grafana Dashboard service completed 2022-09-09 00:17:47 +09:00
David Mydlarz
1f40c2ccf8 add trilium-notes-service 2022-09-08 17:32:11 +09:00
Kaname
4a8fd309c5 fix(routes): searchbar ui 2022-09-07 17:59:22 +00:00
Kaname
b416849d9c feat: re-apply ui improves 2022-09-07 17:14:29 +00:00
Kaname
bc321d8ced Merge branch 'next' into some-tweaks 2022-09-07 17:14:09 +00:00
Kaname
45919fc0cf fix(routes): duplicates classes in services page 2022-09-07 02:09:12 +00:00
Kaname
dd6f4c4844 fix(routes): ui from settings page 2022-09-07 02:03:48 +00:00
Kaname
bb47db033f fix(routes): more ui tweaks 2022-09-07 01:47:43 +00:00
Kaname
111ea78693 fix(routes): more ui tweaks 2022-09-07 01:47:04 +00:00
Kaname
c17253589a fix(routes): more ui tweaks 2022-09-07 01:45:05 +00:00
Kaname
7e6156f5dd fix(routes): more ui tweaks 2022-09-07 01:29:48 +00:00
Kaname
d5cfb63f52 fix(routes): ui from services page 2022-09-07 00:50:34 +00:00
Kaname
cab15055e7 fix(routes): ui from databases page 2022-09-07 00:22:56 +00:00
Kaname
9185910171 fix(routes): ui from databases page 2022-09-07 00:20:15 +00:00
Kaname
b4892e0caf fix(routes): ui from databases page 2022-09-07 00:16:57 +00:00
Kaname
83e0cafef9 chore: minor changes 2022-09-06 23:46:11 +00:00
Kaname
7cb75506c3 fix(routes): ui from destinations page 2022-09-06 23:43:11 +00:00
Kaname
ac6970ad40 fix(routes): improve design of git sources page 2022-09-06 19:06:27 +00:00
Kaname
5a95cc236c fix(routes): improve design of application page 2022-09-06 18:51:19 +00:00
Kaname
95c942f477 feat(layout): added drawer when user is in mobile 2022-09-06 17:37:26 +00:00
331 changed files with 30679 additions and 18916 deletions

View File

@@ -1,5 +1,6 @@
.DS_Store
node_modules
.pnpm-store
build
.svelte-kit
package
@@ -7,6 +8,9 @@ package
.env.*
!.env.example
dist
client
apps/api/db/*.db
local-serve
local-serve
apps/api/db/migration.db-journal
apps/api/core*
logs
others/certificates

View File

@@ -2,20 +2,25 @@ name: 🐞 Bug report
description: Create a bug report to help us improve coolify
title: "[Bug]: "
labels: [Bug]
assignees:
- andrasbacsai
- vasani-arpit
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report! Please fill the form in English
Thanks for taking the time to fill out this bug report! Please fill the form in English.
- type: checkboxes
attributes:
label: Is there an existing issue for this?
options:
- label: I have searched the existing issues
required: true
- type: input
id: repository
attributes:
label: Example public repository
description: "An example public git repository to reproduce the issue easily (if applicable)."
placeholder: "ex: https://github.com/coollabsio/coolify"
validations:
required: false
- type: textarea
attributes:
label: Description

View File

@@ -2,9 +2,6 @@ name: 🛠️ Feature request
description: Suggest an idea to improve coolify
title: '[Feature]: '
labels: [Enhancement]
assignees:
- andrasbacsai
- vasani-arpit
body:
- type: markdown
attributes:

View File

@@ -1,65 +1,81 @@
name: production-release
name: Production Release to ghcr.io
on:
release:
types: [released]
env:
REGISTRY: ghcr.io
IMAGE_NAME: "coollabsio/coolify"
jobs:
arm64-build:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/arm64
push: true
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
amd64-build:
amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "v3"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
aarch64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "v3"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}-aarch64
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/aarch64
push: true
tags: ${{ steps.meta.outputs.tags }}-aarch64
labels: ${{ steps.meta.outputs.labels }}
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64-build, arm64-build]
needs: [amd64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -67,18 +83,22 @@ jobs:
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
docker buildx imagetools create --append ${{ fromJSON(steps.meta.outputs.json).tags[0] }}-aarch64 --tag ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
- uses: sarisia/actions-status-discord@v1
if: always()
with:

View File

@@ -1,90 +0,0 @@
name: release-candidate
on:
release:
types: [prereleased]
jobs:
arm64-making-something-cool:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/arm64
push: true
tags: coollabsio/coolify:${{github.event.release.name}}-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-rc-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-rc-arm64,mode=max
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
amd64-making-something-cool:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:${{github.event.release.name}}-amd64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-rc-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-rc-amd64,mode=max
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
merge-manifest-to-be-cool:
runs-on: ubuntu-latest
needs: [arm64-making-something-cool, amd64-making-something-cool]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify:${{github.event.release.name}} --amend coollabsio/coolify:${{github.event.release.name}}-amd64 --amend coollabsio/coolify:${{github.event.release.name}}-arm64
docker manifest push coollabsio/coolify:${{github.event.release.name}}

View File

@@ -1,70 +1,76 @@
name: staging-release
name: Staging Release to ghcr.io
concurrency:
group: staging_environment
cancel-in-progress: true
on:
push:
branches:
- next
- "v3"
env:
REGISTRY: ghcr.io
IMAGE_NAME: "coollabsio/coolify"
jobs:
arm64-making-something-cool:
runs-on: [self-hosted, arm64]
amd64:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/arm64
push: true
tags: coollabsio/coolify:next-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
amd64-making-something-cool:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
ref: "v3"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:next-amd64,coollabsio/coolify:next-test
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
merge-manifest-to-be-cool:
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
aarch64:
runs-on:
group: aarch-runners
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "v3"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/aarch64
push: true
tags: ${{ steps.meta.outputs.tags }}-aarch64
labels: ${{ steps.meta.outputs.labels }}
merge-manifest:
runs-on: ubuntu-latest
needs: [arm64-making-something-cool, amd64-making-something-cool]
needs: [amd64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -72,15 +78,20 @@ jobs:
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify:next --amend coollabsio/coolify:next-amd64 --amend coollabsio/coolify:next-arm64
docker manifest push coollabsio/coolify:next
docker buildx imagetools create --append ${{ steps.meta.outputs.tags }}-aarch64 --tag ${{ steps.meta.outputs.tags }}
- uses: sarisia/actions-status-discord@v1
if: always()
with:

17
.gitignore vendored
View File

@@ -1,16 +1,25 @@
.DS_Store
node_modules
.pnpm-store
build
/apps/ui/build
/build
.svelte-kit
package
.env
.env.*
!.env.example
dist
client
apps/api/db/*.db
local-serve
apps/api/db/migration.db-journal
apps/api/core*
logs
apps/server/build
apps/backup/backups/*
!apps/backup/backups/.gitkeep
/logs
others/certificates
backups/*
!backups/.gitkeep
# Trpc
apps/server/db/*.db
apps/server/db/*.db-journal

32
.vscode/settings.json vendored
View File

@@ -1,11 +1,31 @@
{
"i18n-ally.localesPaths": ["src/lib/locales"],
"i18n-ally.localesPaths": [
"src/lib/locales"
],
"i18n-ally.keystyle": "nested",
"i18n-ally.extract.ignoredByFiles": {
"src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
"src\\routes\\__layout.svelte": [
"Coolify",
"coolLabs logo"
]
},
"i18n-ally.sourceLanguage": "en",
"i18n-ally.enabledFrameworks": ["svelte"],
"i18n-ally.enabledParsers": ["js", "ts", "json"],
"i18n-ally.extract.autoDetect": true
}
"i18n-ally.enabledFrameworks": [
"svelte"
],
"i18n-ally.enabledParsers": [
"js",
"ts",
"json"
],
"i18n-ally.extract.autoDetect": true,
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/Thumbs.db": true
},
"hide-files.files": []
}

View File

@@ -1,115 +1,48 @@
# Contribution
# Contributing
First, thanks for considering to contribute to my project. It really means a lot! :)
> "First, thanks for considering to contribute to my project.
It really means a lot! 😁" - [@andrasbacsai](https://github.com/andrasbacsai)
You can ask for guidance anytime on our Discord server in the #contribution channel.
You can ask for guidance anytime on our
[Discord server](https://coollabs.io/discord) in the `#contribution` channel.
## Setup your development environment
### Github codespaces
You'll need a set of skills to [get started](docs/contribution/GettingStarted.md).
If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to start the dev environment. All the required dependencies and packages have been configured for you already.
## 1) Setup your development environment
### Gitpod
- 🌟 [Container based](docs/dev_setup/Container.md) ← *Recommended*
- 📦 [DockerContainer](docs/dev_setup/DockerContiner.md) *WIP*
- 🐙 [Github Codespaces](docs/dev_setup/GithubCodespaces.md)
- ☁️ [GitPod](docs/dev_setup/GitPod.md)
- 🍏 [Local Mac](docs/dev_setup/Mac.md)
If you use [Gitpod](https://gitpod.io), you can just create a workspace from this repository, run `pnpm install && pnpm db:push && pnpm db:seed` and then `pnpm dev`. All the required dependencies and packages have been configured for you already.
## 2) Basic requirements
### Local Machine
> At the moment, Coolify `doesn't support Windows`. You must use `Linux` or `MacOS` or consider using Gitpod or Github Codespaces.
- [Install Pnpm](https://pnpm.io/installation)
- [Install Docker Engine](https://docs.docker.com/engine/install/)
- [Setup Docker Compose Plugin](https://docs.docker.com/compose/install/)
- [Setup GIT LFS Support](https://git-lfs.github.com/)
- Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
## 3) Setup Coolify
- You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
- You need to have [Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/) installed locally.
- You need to have [GIT LFS Support](https://git-lfs.github.com/) installed locally.
- Copy `apps/api/.env.example` to `apps/api/.env`
- Edit `apps/api/.env`, set the `COOLIFY_APP_ID` environment variable to something cool.
- Run `pnpm install` to install dependencies.
- Run `pnpm db:push` to create a local SQLite database. This will apply all migrations at `db/dev.db`.
- Run `pnpm db:seed` to seed the database.
- Run `pnpm dev` to start coding.
Optional:
- To test Heroku buildpacks, you need [pack](https://github.com/buildpacks/pack) binary installed locally.
```sh
# Or... Copy and paste commands below:
cp apps/api/.env.example apps/api/.env
pnpm install
pnpm db:push
pnpm db:seed
pnpm dev
```
### Inside a Docker container
`WIP`
## 4) Start Coding
## Setup Coolify
- Copy `apps/api/.env.template` to `apps/api/.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- `pnpm install` to install dependencies.
- `pnpm db:push` to create a local SQLite database.
You should be able to access `http://localhost:3000`.
This will apply all migrations at `db/dev.db`.
- `pnpm db:seed` to seed the database.
- `pnpm dev` to start coding.
## Technical skills required
- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: [SvelteKit](https://kit.svelte.dev/) & [Fastify](https://www.fastify.io/)
- **Database ORM**: [Prisma.io](https://www.prisma.io/)
- **Docker Engine API**
## Add a new service
### Which service is eligible to add to Coolify?
The following statements need to be true:
- Self-hostable
- Open-source
- Maintained (I do not want to add software full of bugs)
### Create Prisma / Database schema for the new service.
All data that needs to persist for a service should be saved to the database in `cleartext` or `encrypted`.
Every password/API key/passphrase needs to be encrypted. If you are not sure whether it should be encrypted or not, just encrypt it.
Update Prisma schema in [src/apps/api/prisma/schema.prisma](https://github.com/coollabsio/coolify/blob/main/apps/api/prisma/schema.prisma).
- Add a new model with the new service name.
- Make a relationship with the `Service` model.
- In the `Service` model, the name of the new field should start with a lowercase letter.
- If the service needs a database, define a `publicPort` field to be able to make its database public, example field name in case of PostgreSQL: `postgresqlPublicPort`. It should be an optional field.
Once done, create Prisma schema with `pnpm db:push`.
> You may also need to restart `Typescript Language Server` in your IDE to get the new types.
### Add available versions
Versions are hardcoded into Coolify at the moment and based on Docker image tags.
- Update `supportedServiceTypesAndVersions` function [here](apps/api/src/lib/services/supportedVersions.ts)
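For illustration only, a new entry might look roughly like this (a sketch in TypeScript; the field names are assumptions modelled on the existing entries, so copy the real shape from `supportedVersions.ts`):
```ts
// Hedged sketch: the real entry shape lives in apps/api/src/lib/services/supportedVersions.ts.
// The interface only illustrates the kind of data the list holds.
interface ServiceVersionEntry {
  name: string;              // internal type name used across the API
  fancyName: string;         // label shown in the UI
  baseImage: string;         // Docker image the version tags refer to
  versions: string[];        // hardcoded Docker image tags
  recommendedVersion: string;
  ports: { main: number };   // port the proxy routes to
}

const myServiceEntry: ServiceVersionEntry = {
  name: 'myservice',
  fancyName: 'MyService',
  baseImage: 'myservice/myservice',
  versions: ['1.2.0', '1.1.0'],
  recommendedVersion: '1.2.0',
  ports: { main: 8080 }
};
```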
### Include the new service in queries
At [here](apps/api/src/lib/services/common.ts) in the `includeServices` function add the new table name, so it will be included everywhere in the database queries where it is required.
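As a rough sketch of the idea (the existing relation names below are placeholders, not copied from the codebase; only the last property is the addition):
```ts
// Sketch of the include map built in apps/api/src/lib/services/common.ts.
// The first relations stand in for the services that are already there.
const includeServices = {
  destinationDocker: true,
  persistentStorage: true,
  serviceSecret: true,
  wordpress: true,   // ...existing service relations
  myservice: true    // <-- the new, lowercased relation added to the Service model
};
```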
### Define auto-generated fields
At [here](apps/api/src/lib/services/common.ts) in the `configureServiceType` function add the initial auto-generated details, such as passwords and users, and the encryption process for secrets (if applicable).
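A minimal sketch of what the new branch could do; `prisma`, `encrypt` and `generatePassword` are assumptions standing in for whatever the surrounding function already has in scope, and the nested `create` mirrors the model you added in the Prisma step:
```ts
// Sketch for the new branch in configureServiceType() (apps/api/src/lib/services/common.ts).
async function configureMyService(
  prisma: any,
  encrypt: (value: string) => string,
  generatePassword: () => string,
  id: string
): Promise<void> {
  const adminPassword = encrypt(generatePassword()); // secrets are always stored encrypted
  await prisma.service.update({
    where: { id },
    data: {
      type: 'myservice',
      myservice: { create: { adminUser: 'admin', adminPassword } }
    }
  });
}
```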
### Define input field details
At [here](apps/api/src/lib/services/serviceFields.ts) add details about the input fields shown in the UI, so every component (API/UI) will know what to do with the values (decrypt/show it by default/readonly/etc).
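A hedged sketch of such an entry (the flag names below are illustrative of the kind of metadata the API/UI read; mirror the real exports in `serviceFields.ts`):
```ts
// Sketch of a field-description export in apps/api/src/lib/services/serviceFields.ts.
// Flags like isEditable / isEncrypted are assumptions that illustrate the idea
// of marking fields readonly, encrypted, shown by default, and so on.
export const myservice = [
  { name: 'adminUser', isEditable: true, isEncrypted: false },
  { name: 'adminPassword', isEditable: false, isEncrypted: true }
];
```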
### Define the start process
- At [here](apps/api/src/lib/services/handlers.ts), define how the service should start. It could be complex and based on `docker-compose` definitions.
> See `startUmamiService()` function as example.
- At [here](apps/api/src/routes/api/v1/services/handlers.ts), add the new start service process to `startService` function.
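To give a feel for the shape of a start handler, here is a heavily simplified sketch; the real handlers (see `startUmamiService()`) build a full `docker-compose` definition with shared docker helpers, so every name below is illustrative, not the actual implementation:
```ts
// Heavily simplified sketch of a start handler for apps/api/src/lib/services/handlers.ts.
// The service shape and the helper-free flow are assumptions for illustration only.
import { dump } from 'js-yaml';

interface MyServiceRecord {
  id: string;
  version: string;
}

export async function startMyServiceService(service: MyServiceRecord): Promise<string> {
  // Build a docker-compose definition for the single service container.
  const composeFile = {
    version: '3.8',
    services: {
      [service.id]: {
        image: `myservice/myservice:${service.version}`,
        container_name: service.id,
        restart: 'always'
      }
    }
  };
  // The real code writes this YAML to disk and brings it up with `docker compose up -d`,
  // and the route-level startService() dispatches to the handler by service type.
  return dump(composeFile);
}
```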
### Define the deletion process
[Here](apps/api/src/lib/services/common.ts) in `removeService` add the database deletion process.
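A minimal sketch, assuming the Prisma client is already in scope as in the rest of `common.ts` (the model name `myService` is the hypothetical one from the earlier steps):
```ts
// Sketch for removeService() in apps/api/src/lib/services/common.ts:
// remove the new service's own rows before the parent Service row goes away.
async function removeMyServiceRows(prisma: any, serviceId: string): Promise<void> {
  await prisma.myService.deleteMany({ where: { serviceId } });
}
```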
### Custom logo
- At [here](apps/ui/src/lib/components/svg/services) add the service's custom logo as a Svelte component and export it [here](apps/ui/src/lib/components/svg/services/index.ts).
> SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
- At [here](apps/ui/src/lib/components/svg/services/ServiceIcons.svelte) include the new logo with `isAbsolute` property.
- At [here](apps/ui/src/routes/services/[id]/_ServiceLinks.svelte) add links to the documentation of the service.
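The export step is a one-liner; a sketch, assuming your new logo component file is called `MyService.svelte` (a hypothetical name):
```ts
// Sketch for apps/ui/src/lib/components/svg/services/index.ts:
// re-export the new Svelte logo component next to the existing ones.
export { default as MyService } from './MyService.svelte';
```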
### Custom fields on the UI
By default the URL and name are shown on the UI. Everything else needs to be added [here](apps/ui/src/routes/services/[id]/_Services/_Services.svelte)
> If you need to show more details on the frontend, such as users/passwords, you need to add a Svelte component [here](apps/ui/src/routes/services/[id]/_Services) with an underscore. For example, see others [here](apps/ui/src/routes/services/[id]/_Services/_Umami.svelte).
Good job! 👏
1. Click `Register` and setup your first user.

View File

@@ -1,5 +1,4 @@
ARG PNPM_VERSION=7.11.0
ARG NPM_VERSION=8.19.1
FROM node:18-slim as build
WORKDIR /app
@@ -17,30 +16,38 @@ WORKDIR /app
ENV NODE_ENV production
ARG TARGETPLATFORM
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
# https://download.docker.com/linux/static/stable/
ARG DOCKER_VERSION=20.10.18
# https://github.com/docker/compose/releases
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=0.27.0
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3 vim
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${PNPM_VERSION}
RUN mkdir -p ~/.docker/cli-plugins/
# https://download.docker.com/linux/static/stable/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
# https://github.com/docker/compose/releases
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.6.1 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
RUN (curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.27.0/pack-v0.27.0-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack)
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-$DOCKER_COMPOSE_VERSION -o ~/.docker/cli-plugins/docker-compose
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /usr/local/bin/pack
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack
COPY --from=build /app/apps/api/build/ .
COPY --from=build /app/others/fluentbit/ ./fluentbit
# COPY --from=build /app/others/fluentbit/ ./fluentbit
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .
COPY --from=build /app/docker-compose.yaml .
COPY --from=build /app/apps/api/tags.json .
COPY --from=build /app/apps/api/templates.json .
RUN pnpm install -p
EXPOSE 3000
ENV CHECKPOINT_DISABLE=1
CMD pnpm start
CMD pnpm start

31
Dockerfile-dev Normal file
View File

@@ -0,0 +1,31 @@
FROM node:18-slim
ENV NODE_ENV development
ARG TARGETPLATFORM
ARG PNPM_VERSION=7.11.0
ARG NPM_VERSION=8.19.1
# https://download.docker.com/linux/static/stable/
ARG DOCKER_VERSION=20.10.18
# https://github.com/docker/compose/releases
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=0.27.0
WORKDIR /app
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${PNPM_VERSION}
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-$DOCKER_COMPOSE_VERSION -o ~/.docker/cli-plugins/docker-compose
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /usr/local/bin/pack
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack
EXPOSE 3000
ENV CHECKPOINT_DISABLE=1

View File

@@ -16,7 +16,7 @@ If you have a new service / build pack you would like to add, raise an idea [her
## How to install
For more details go to the [docs](https://docs.coollabs.io/coolify/installation).
For more details go to the [docs](https://docs.coollabs.io/coolify-v3/installation).
Installation is automated with the following command:
@@ -77,10 +77,11 @@ Deploy your resource to:
<a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a>
### Services
- [Appwrite](https://appwrite.io)
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [WordPress](https://docs.coollabs.io/coolify-v3/services/wordpress)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
- [Plausible Analytics](https://docs.coollabs.io/coolify-v3/services/plausible-analytics)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
@@ -93,28 +94,48 @@ Deploy your resource to:
- [Fider](https://fider.io)
- [Hasura](https://hasura.io)
- [GlitchTip](https://glitchtip.com)
## Migration from v1
A fresh installation is necessary. v2 and v3 are not compatible with v1.
- And more...
## Support
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Mastodon: [@andrasbacsai@fosstodon.org](https://fosstodon.org/@andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Twitter: [@andrasbacsai](https://twitter.com/heyandras)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://coollabs.io/discord)
## Financial Contributors
---
## ⚗️ Expertise Contributions
Coolify is developed under the [Apache License](./LICENSE) and you can help to make it grow.
Our community will be glad to have you on board!
Learn how to contribute to Coolify as a ...
&rarr; [👩🏾‍💻 Software developer](./CONTRIBUTION.md)
&rarr; [🧑🏻‍🏫 Translator](./docs/contribution/Translating.md)
<!--
&rarr; 🧑🏽‍🎨 Designer
&rarr; 🙋‍♀️ Community Manager
&rarr; 🧙🏻‍♂️ Text Content Creator
&rarr; 👨🏼‍🎤 Video Content Creator
-->
---
## 💰 Financial Contributors
Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/coollabsio/contribute)]
### Individuals
<a href="https://opencollective.com/coollabsio"><img src="https://opencollective.com/coollabsio/individuals.svg?width=890"></a>
### Organizations
Special thanks to our biggest sponsor, [CCCareers](https://cccareers.org/)!
![CCCareers](./others/logo/ccc-logo.webp)
Support this project with your organization. Your logo will show up here with a link to your website.
<a href="https://opencollective.com/coollabsio/organization/0/website"><img src="https://opencollective.com/coollabsio/organization/0/avatar.svg"></a>
@@ -127,3 +148,11 @@ Support this project with your organization. Your logo will show up here with a
<a href="https://opencollective.com/coollabsio/organization/7/website"><img src="https://opencollective.com/coollabsio/organization/7/avatar.svg"></a>
<a href="https://opencollective.com/coollabsio/organization/8/website"><img src="https://opencollective.com/coollabsio/organization/8/avatar.svg"></a>
<a href="https://opencollective.com/coollabsio/organization/9/website"><img src="https://opencollective.com/coollabsio/organization/9/avatar.svg"></a>
### Individuals
<a href="https://opencollective.com/coollabsio"><img src="https://opencollective.com/coollabsio/individuals.svg?width=890"></a>
## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=coollabsio/coolify&type=Date)](https://star-history.com/#coollabsio/coolify&Date)

View File

@@ -1,10 +1,9 @@
COOLIFY_APP_ID=local-dev
# 32 bits long secret key
COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db
COOLIFY_SENTRY_DSN=
COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=
COOLIFY_APP_ID=local-dev
# 32 bits long secret key
COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db
COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=

4
apps/api/.gitignore vendored
View File

@@ -8,4 +8,6 @@ package
!.env.example
dist
dev.db
client
client
testTemplate.yaml
testTags.json

1071
apps/api/devTags.json Normal file

File diff suppressed because it is too large

3834
apps/api/devTemplates.yaml Normal file

File diff suppressed because it is too large

View File

@@ -1,7 +1,11 @@
{
"watch": ["src"],
"ignore": ["src/**/*.test.ts"],
"ext": "ts,mjs,json,graphql",
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build",
"legacyWatch": true
}
"watch": [
"src"
],
"ignore": [
"src/**/*.test.ts"
],
"ext": "ts,mjs,json,graphql",
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --platform=node --outdir=build --format=cjs && node build",
"legacyWatch": true
}

View File

@@ -1,75 +1,83 @@
{
"name": "api",
"description": "Coolify's Fastify API",
"license": "Apache-2.0",
"scripts": {
"db:push": "prisma db push && prisma generate",
"db:seed": "prisma db seed",
"db:studio": "prisma studio",
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
"dev": "nodemon",
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
"format": "prettier --write 'src/**/*.{js,ts,json,md}'",
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
"start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js"
},
"dependencies": {
"@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.3.1",
"@fastify/cookie": "8.1.0",
"@fastify/cors": "8.1.0",
"@fastify/env": "4.1.0",
"@fastify/jwt": "6.3.2",
"@fastify/static": "6.5.0",
"@iarna/toml": "2.2.5",
"@ladjs/graceful": "3.0.2",
"@prisma/client": "4.3.1",
"axios": "0.27.2",
"bcryptjs": "2.4.3",
"bree": "9.1.2",
"cabin": "9.1.2",
"compare-versions": "5.0.1",
"csv-parse": "^5.3.0",
"csvtojson": "^2.0.10",
"cuid": "2.1.8",
"dayjs": "1.11.5",
"dockerode": "3.3.4",
"dotenv-extended": "2.9.0",
"execa": "6.1.0",
"fastify": "4.5.3",
"fastify-plugin": "4.2.1",
"generate-password": "1.7.0",
"got": "12.4.1",
"is-ip": "5.0.0",
"is-port-reachable": "4.0.0",
"js-yaml": "4.1.0",
"jsonwebtoken": "8.5.1",
"node-forge": "1.3.1",
"node-os-utils": "1.3.7",
"p-all": "4.0.0",
"p-throttle": "5.0.0",
"public-ip": "6.0.1",
"ssh-config": "4.1.6",
"strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1"
},
"devDependencies": {
"@types/node": "18.7.15",
"@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.36.2",
"@typescript-eslint/parser": "5.36.2",
"esbuild": "0.15.7",
"eslint": "8.23.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-prettier": "4.2.1",
"nodemon": "2.0.19",
"prettier": "2.7.1",
"prisma": "4.3.1",
"rimraf": "3.0.2",
"tsconfig-paths": "4.1.0",
"typescript": "4.8.2"
},
"prisma": {
"seed": "node prisma/seed.js"
}
}
"name": "api",
"description": "Coolify's Fastify API",
"license": "Apache-2.0",
"scripts": {
"db:generate": "prisma generate",
"db:push": "prisma db push && prisma generate",
"db:seed": "prisma db seed",
"db:studio": "prisma studio",
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
"dev": "nodemon",
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
"format": "prettier --write 'src/**/*.{js,ts,json,md}'",
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
"start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js"
},
"dependencies": {
"@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.7.0",
"@fastify/cookie": "8.3.0",
"@fastify/cors": "8.2.0",
"@fastify/env": "4.2.0",
"@fastify/jwt": "6.5.0",
"@fastify/multipart": "7.4.1",
"@fastify/static": "6.6.0",
"@iarna/toml": "2.2.5",
"@ladjs/graceful": "3.2.1",
"@prisma/client": "4.8.1",
"axe": "11.2.1",
"bcryptjs": "2.4.3",
"bree": "9.1.3",
"cabin": "11.1.1",
"compare-versions": "5.0.1",
"csv-parse": "5.3.3",
"csvtojson": "2.0.10",
"cuid": "2.1.8",
"dayjs": "1.11.7",
"dockerode": "3.3.4",
"dotenv-extended": "2.9.0",
"execa": "6.1.0",
"fastify": "4.11.0",
"fastify-plugin": "4.3.0",
"fastify-socket.io": "4.0.0",
"generate-password": "1.7.0",
"got": "12.5.3",
"is-ip": "5.0.0",
"is-port-reachable": "4.0.0",
"js-yaml": "4.1.0",
"jsonwebtoken": "9.0.0",
"minimist": "^1.2.7",
"node-forge": "1.3.1",
"node-os-utils": "1.3.7",
"p-all": "4.0.0",
"p-throttle": "5.0.0",
"prisma": "4.8.1",
"public-ip": "6.0.1",
"pump": "3.0.0",
"shell-quote": "^1.7.4",
"socket.io": "4.5.4",
"ssh-config": "4.2.0",
"strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1"
},
"devDependencies": {
"@types/node": "18.11.18",
"@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.48.1",
"@typescript-eslint/parser": "5.48.1",
"esbuild": "0.16.16",
"eslint": "8.31.0",
"eslint-config-prettier": "8.6.0",
"eslint-plugin-prettier": "4.2.1",
"nodemon": "2.0.20",
"prettier": "2.8.2",
"rimraf": "3.0.2",
"tsconfig-paths": "4.1.2",
"types-fastify-socket.io": "0.0.1",
"typescript": "4.9.4"
},
"prisma": {
"seed": "node prisma/seed.js"
}
}

View File

@@ -0,0 +1,10 @@
-- CreateTable
CREATE TABLE "Certificate" (
"id" TEXT NOT NULL PRIMARY KEY,
"key" TEXT NOT NULL,
"cert" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"teamId" TEXT,
CONSTRAINT "Certificate_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

View File

@@ -0,0 +1,23 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"debug" BOOLEAN NOT NULL DEFAULT false,
"previews" BOOLEAN NOT NULL DEFAULT false,
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
"isBot" BOOLEAN NOT NULL DEFAULT false,
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,26 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"forPublic" BOOLEAN NOT NULL DEFAULT false,
"type" TEXT,
"apiUrl" TEXT,
"htmlUrl" TEXT,
"customPort" INTEGER NOT NULL DEFAULT 22,
"organization" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- DropIndex
DROP INDEX "PreviewApplication_applicationId_key";

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Build" ADD COLUMN "sourceRepository" TEXT;

View File

@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerComposeFile" TEXT;
ALTER TABLE "Application" ADD COLUMN "dockerComposeFileLocation" TEXT;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerComposeConfiguration" TEXT;

View File

@@ -0,0 +1,13 @@
-- CreateTable
CREATE TABLE "ServiceSetting" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"name" TEXT NOT NULL,
"value" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ServiceSetting_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "ServiceSetting_serviceId_name_key" ON "ServiceSetting"("serviceId", "name");

View File

@@ -0,0 +1,19 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ServicePersistentStorage" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"path" TEXT NOT NULL,
"volumeName" TEXT,
"predefined" BOOLEAN NOT NULL DEFAULT false,
"containerId" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ServicePersistentStorage_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ServicePersistentStorage" ("createdAt", "id", "path", "serviceId", "updatedAt") SELECT "createdAt", "id", "path", "serviceId", "updatedAt" FROM "ServicePersistentStorage";
DROP TABLE "ServicePersistentStorage";
ALTER TABLE "new_ServicePersistentStorage" RENAME TO "ServicePersistentStorage";
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_path_key" ON "ServicePersistentStorage"("serviceId", "path");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,24 @@
/*
Warnings:
- Added the required column `variableName` to the `ServiceSetting` table without a default value. This is not possible if the table is not empty.
*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ServiceSetting" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"name" TEXT NOT NULL,
"value" TEXT NOT NULL,
"variableName" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ServiceSetting_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ServiceSetting" ("createdAt", "id", "name", "serviceId", "updatedAt", "value") SELECT "createdAt", "id", "name", "serviceId", "updatedAt", "value" FROM "ServiceSetting";
DROP TABLE "ServiceSetting";
ALTER TABLE "new_ServiceSetting" RENAME TO "ServiceSetting";
CREATE UNIQUE INDEX "ServiceSetting_serviceId_name_key" ON "ServiceSetting"("serviceId", "name");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
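The warning above applies to installs where ServiceSetting already holds rows: the new variableName column is NOT NULL with no default, so this table rewrite only succeeds on an empty table. For orientation, a hypothetical Prisma Client call creating a row under the post-migration shape (all identifiers and values below are illustrative, not taken from Coolify):

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Hypothetical example of the post-migration row shape: a human-readable name,
// the variable name it maps to (variableName), and the stored value.
async function addSetting(serviceId: string) {
	await prisma.serviceSetting.create({
		data: {
			serviceId,
			name: 'admin email',          // illustrative only
			variableName: 'ADMIN_EMAIL',  // illustrative only
			value: 'admin@example.com'
		}
	});
}

addSetting('service-cuid-here').finally(() => prisma.$disconnect());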

View File

@@ -0,0 +1,21 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Service" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"fqdn" TEXT,
"exposePort" INTEGER,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"type" TEXT,
"version" TEXT,
"templateVersion" TEXT NOT NULL DEFAULT '0.0.0',
"destinationDockerId" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Service_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Service" ("createdAt", "destinationDockerId", "dualCerts", "exposePort", "fqdn", "id", "name", "type", "updatedAt", "version") SELECT "createdAt", "destinationDockerId", "dualCerts", "exposePort", "fqdn", "id", "name", "type", "updatedAt", "version" FROM "Service";
DROP TABLE "Service";
ALTER TABLE "new_Service" RENAME TO "Service";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,11 @@
/*
Warnings:
- A unique constraint covering the columns `[serviceId,containerId,path]` on the table `ServicePersistentStorage` will be added. If there are existing duplicate values, this will fail.
*/
-- DropIndex
DROP INDEX "ServicePersistentStorage_serviceId_path_key";
-- CreateIndex
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_containerId_path_key" ON "ServicePersistentStorage"("serviceId", "containerId", "path");
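As the warning notes, this index creation fails if an existing database already has two rows sharing the same serviceId, containerId and path. A hypothetical pre-flight check, run before migrating, could surface such rows with a raw query (a sketch only, not part of Coolify):

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// List combinations that would violate the new composite unique index.
async function findDuplicatePersistentStorage() {
	return prisma.$queryRaw`
		SELECT "serviceId", "containerId", "path", COUNT(*) AS "count"
		FROM "ServicePersistentStorage"
		GROUP BY "serviceId", "containerId", "path"
		HAVING COUNT(*) > 1`;
}

findDuplicatePersistentStorage()
	.then((rows) => console.log(rows))
	.finally(() => prisma.$disconnect());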

View File

@@ -0,0 +1,32 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Wordpress" (
"id" TEXT NOT NULL PRIMARY KEY,
"extraConfig" TEXT,
"tablePrefix" TEXT,
"ownMysql" BOOLEAN NOT NULL DEFAULT false,
"mysqlHost" TEXT,
"mysqlPort" INTEGER,
"mysqlUser" TEXT,
"mysqlPassword" TEXT,
"mysqlRootUser" TEXT,
"mysqlRootUserPassword" TEXT,
"mysqlDatabase" TEXT,
"mysqlPublicPort" INTEGER,
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
"ftpUser" TEXT,
"ftpPassword" TEXT,
"ftpPublicPort" INTEGER,
"ftpHostKey" TEXT,
"ftpHostKeyPrivate" TEXT,
"serviceId" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlHost", "mysqlPassword", "mysqlPort", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "ownMysql", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlHost", "mysqlPassword", "mysqlPort", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "ownMysql", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
DROP TABLE "Wordpress";
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "proxyDefaultRedirect" TEXT;

View File

@@ -0,0 +1,45 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"proxyPassword" TEXT NOT NULL,
"proxyUser" TEXT NOT NULL,
"proxyHash" TEXT,
"proxyDefaultRedirect" TEXT,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"DNSServers" TEXT,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false
);
INSERT INTO "new_Setting" ("DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_ApplicationPersistentStorage" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"path" TEXT NOT NULL,
"oldPath" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationPersistentStorage" ("applicationId", "createdAt", "id", "path", "updatedAt") SELECT "applicationId", "createdAt", "id", "path", "updatedAt" FROM "ApplicationPersistentStorage";
DROP TABLE "ApplicationPersistentStorage";
ALTER TABLE "new_ApplicationPersistentStorage" RENAME TO "ApplicationPersistentStorage";
CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,37 @@
/*
Warnings:
- You are about to drop the column `proxyHash` on the `Setting` table. All the data in the column will be lost.
- You are about to drop the column `proxyPassword` on the `Setting` table. All the data in the column will be lost.
- You are about to drop the column `proxyUser` on the `Setting` table. All the data in the column will be lost.
*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
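Because proxyUser, proxyPassword and proxyHash are dropped outright here, their values are unrecoverable once the migration runs. A minimal sketch of a backup step, assuming it is executed before migrating (while the previously generated client still exposes those fields):

import fs from 'fs/promises';
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Hypothetical pre-migration step: dump the soon-to-be-dropped columns to disk.
async function backupProxyCredentials() {
	const setting = await prisma.setting.findFirst({
		select: { id: true, proxyUser: true, proxyPassword: true, proxyHash: true }
	});
	await fs.writeFile('./setting-proxy-backup.json', JSON.stringify(setting, null, 2));
}

backupProxyCredentials().finally(() => prisma.$disconnect());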

View File

@@ -0,0 +1,59 @@
-- CreateTable
CREATE TABLE "DockerRegistry" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"url" TEXT NOT NULL,
"username" TEXT,
"password" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"teamId" TEXT,
CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Application" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"fqdn" TEXT,
"repository" TEXT,
"configHash" TEXT,
"branch" TEXT,
"buildPack" TEXT,
"projectId" INTEGER,
"port" INTEGER,
"exposePort" INTEGER,
"installCommand" TEXT,
"buildCommand" TEXT,
"startCommand" TEXT,
"baseDirectory" TEXT,
"publishDirectory" TEXT,
"deploymentType" TEXT,
"phpModules" TEXT,
"pythonWSGI" TEXT,
"pythonModule" TEXT,
"pythonVariable" TEXT,
"dockerFileLocation" TEXT,
"denoMainFile" TEXT,
"denoOptions" TEXT,
"dockerComposeFile" TEXT,
"dockerComposeFileLocation" TEXT,
"dockerComposeConfiguration" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"destinationDockerId" TEXT,
"gitSourceId" TEXT,
"baseImage" TEXT,
"baseBuildImage" TEXT,
"dockerRegistryId" TEXT NOT NULL DEFAULT '0',
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,30 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,60 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT,
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", coalesce("isAPIDebuggingEnabled", false) AS "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
CREATE TABLE "new_GlitchTip" (
"id" TEXT NOT NULL PRIMARY KEY,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"secretKeyBase" TEXT,
"defaultEmail" TEXT NOT NULL,
"defaultUsername" TEXT NOT NULL,
"defaultPassword" TEXT NOT NULL,
"defaultEmailFrom" TEXT NOT NULL DEFAULT 'glitchtip@domain.tdl',
"emailSmtpHost" TEXT DEFAULT 'domain.tdl',
"emailSmtpPort" INTEGER DEFAULT 25,
"emailSmtpUser" TEXT,
"emailSmtpPassword" TEXT,
"emailSmtpUseTls" BOOLEAN NOT NULL DEFAULT false,
"emailSmtpUseSsl" BOOLEAN NOT NULL DEFAULT false,
"emailBackend" TEXT,
"mailgunApiKey" TEXT,
"sendgridApiKey" TEXT,
"enableOpenUserRegistration" BOOLEAN NOT NULL DEFAULT true,
"serviceId" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "GlitchTip_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_GlitchTip" ("createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUseSsl", "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt") SELECT "createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", coalesce("emailSmtpUseSsl", false) AS "emailSmtpUseSsl", coalesce("emailSmtpUseTls", false) AS "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt" FROM "GlitchTip";
DROP TABLE "GlitchTip";
ALTER TABLE "new_GlitchTip" RENAME TO "GlitchTip";
CREATE UNIQUE INDEX "GlitchTip_serviceId_key" ON "GlitchTip"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Setting" ADD COLUMN "sentryDSN" TEXT;

View File

@@ -0,0 +1,31 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"sentryDSN" TEXT,
"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT coalesce("DNSServers", '1.1.1.1,8.8.8.8') AS "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,33 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
"ipv4" TEXT,
"ipv6" TEXT,
"arch" TEXT,
"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
"numberOfDockerImagesKeptLocally" INTEGER NOT NULL DEFAULT 3,
"proxyDefaultRedirect" TEXT,
"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
"sentryDSN" TEXT,
"previewSeparator" TEXT NOT NULL DEFAULT '.',
"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", 3, "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "gitCommitHash" TEXT;

View File

@@ -0,0 +1,66 @@
/*
Warnings:
- You are about to drop the column `isSystemWide` on the `DockerRegistry` table. All the data in the column will be lost.
*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_DockerRegistry" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"url" TEXT NOT NULL,
"username" TEXT,
"password" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"teamId" TEXT,
CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_DockerRegistry" ("createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username") SELECT "createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username" FROM "DockerRegistry";
DROP TABLE "DockerRegistry";
ALTER TABLE "new_DockerRegistry" RENAME TO "DockerRegistry";
CREATE TABLE "new_Application" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"fqdn" TEXT,
"repository" TEXT,
"configHash" TEXT,
"branch" TEXT,
"buildPack" TEXT,
"projectId" INTEGER,
"port" INTEGER,
"exposePort" INTEGER,
"installCommand" TEXT,
"buildCommand" TEXT,
"startCommand" TEXT,
"baseDirectory" TEXT,
"publishDirectory" TEXT,
"deploymentType" TEXT,
"phpModules" TEXT,
"pythonWSGI" TEXT,
"pythonModule" TEXT,
"pythonVariable" TEXT,
"dockerFileLocation" TEXT,
"denoMainFile" TEXT,
"denoOptions" TEXT,
"dockerComposeFile" TEXT,
"dockerComposeFileLocation" TEXT,
"dockerComposeConfiguration" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"destinationDockerId" TEXT,
"gitSourceId" TEXT,
"gitCommitHash" TEXT,
"baseImage" TEXT,
"baseBuildImage" TEXT,
"dockerRegistryId" TEXT,
CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
DROP TABLE "Application";
ALTER TABLE "new_Application" RENAME TO "Application";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "simpleDockerfile" TEXT;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerRegistryImageName" TEXT;

View File

@@ -0,0 +1,27 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"forPublic" BOOLEAN NOT NULL DEFAULT false,
"type" TEXT,
"apiUrl" TEXT,
"htmlUrl" TEXT,
"customPort" INTEGER NOT NULL DEFAULT 22,
"customUser" TEXT NOT NULL DEFAULT 'git',
"organization" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "isSystemWide", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "isSystemWide", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"debug" BOOLEAN NOT NULL DEFAULT false,
"previews" BOOLEAN NOT NULL DEFAULT false,
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
"isBot" BOOLEAN NOT NULL DEFAULT false,
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
"isHttp2" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "ApplicationPersistentStorage" ADD COLUMN "hostPath" TEXT;

View File

@@ -0,0 +1,29 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "basicAuthPw" TEXT;
ALTER TABLE "Application" ADD COLUMN "basicAuthUser" TEXT;
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"debug" BOOLEAN NOT NULL DEFAULT false,
"previews" BOOLEAN NOT NULL DEFAULT false,
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
"isBot" BOOLEAN NOT NULL DEFAULT false,
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
"isHttp2" BOOLEAN NOT NULL DEFAULT false,
"basicAuth" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isHttp2", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isHttp2", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
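These columns back the per-application basic-auth feature; how basicAuthPw is encoded is not visible in this diff. A purely hypothetical helper using bcryptjs (already a dependency of the API) could turn the stored pair into an htpasswd-style entry of the kind Traefik's basicauth middleware accepts:

import bcrypt from 'bcryptjs';

// Hypothetical sketch, not Coolify's actual implementation: hash the password
// and pair it with the user as "user:hash".
export function toBasicAuthEntry(basicAuthUser: string, basicAuthPw: string): string {
	const hash = bcrypt.hashSync(basicAuthPw, 10); // bcrypt with 10 salt rounds
	return `${basicAuthUser}:${hash}`;
}

// e.g. toBasicAuthEntry('admin', 's3cret') -> 'admin:$2a$10$...'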

View File

@@ -8,27 +8,40 @@ datasource db {
url = env("COOLIFY_DATABASE_URL")
}
model Certificate {
id String @id @default(cuid())
key String
cert String
team Team? @relation(fields: [teamId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
teamId String?
}
model Setting {
id String @id @default(cuid())
fqdn String? @unique
isAPIDebuggingEnabled Boolean? @default(false)
isRegistrationEnabled Boolean @default(false)
dualCerts Boolean @default(false)
minPort Int @default(9000)
maxPort Int @default(9100)
proxyPassword String
proxyUser String
proxyHash String?
isAutoUpdateEnabled Boolean @default(false)
isDNSCheckEnabled Boolean @default(true)
DNSServers String?
isTraefikUsed Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
ipv4 String?
ipv6 String?
arch String?
concurrentBuilds Int @default(1)
id String @id @default(cuid())
fqdn String? @unique
dualCerts Boolean @default(false)
minPort Int @default(9000)
maxPort Int @default(9100)
DNSServers String @default("1.1.1.1,8.8.8.8")
ipv4 String?
ipv6 String?
arch String?
concurrentBuilds Int @default(1)
applicationStoragePathMigrationFinished Boolean @default(false)
numberOfDockerImagesKeptLocally Int @default(3)
proxyDefaultRedirect String?
doNotTrack Boolean @default(false)
sentryDSN String?
previewSeparator String @default(".")
isAPIDebuggingEnabled Boolean @default(false)
isRegistrationEnabled Boolean @default(true)
isAutoUpdateEnabled Boolean @default(false)
isDNSCheckEnabled Boolean @default(true)
isTraefikUsed Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model User {
@@ -70,6 +83,8 @@ model Team {
gitLabApps GitlabApp[]
service Service[]
users User[]
certificate Certificate[]
dockerRegistry DockerRegistry[]
}
model TeamInvitation {
@@ -83,43 +98,54 @@ model TeamInvitation {
}
model Application {
id String @id @default(cuid())
name String
fqdn String?
repository String?
configHash String?
branch String?
buildPack String?
projectId Int?
port Int?
exposePort Int?
installCommand String?
buildCommand String?
startCommand String?
baseDirectory String?
publishDirectory String?
deploymentType String?
phpModules String?
pythonWSGI String?
pythonModule String?
pythonVariable String?
dockerFileLocation String?
denoMainFile String?
denoOptions String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
destinationDockerId String?
gitSourceId String?
baseImage String?
baseBuildImage String?
gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
persistentStorage ApplicationPersistentStorage[]
settings ApplicationSettings?
secrets Secret[]
teams Team[]
connectedDatabase ApplicationConnectedDatabase?
previewApplication PreviewApplication[]
id String @id @default(cuid())
name String
fqdn String?
repository String?
configHash String?
branch String?
buildPack String?
projectId Int?
port Int?
exposePort Int?
installCommand String?
buildCommand String?
startCommand String?
baseDirectory String?
publishDirectory String?
deploymentType String?
phpModules String?
pythonWSGI String?
pythonModule String?
pythonVariable String?
dockerFileLocation String?
denoMainFile String?
denoOptions String?
dockerComposeFile String?
dockerComposeFileLocation String?
dockerComposeConfiguration String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
destinationDockerId String?
gitSourceId String?
gitCommitHash String?
baseImage String?
baseBuildImage String?
settings ApplicationSettings?
dockerRegistryId String?
dockerRegistryImageName String?
simpleDockerfile String?
basicAuthUser String?
basicAuthPw String?
persistentStorage ApplicationPersistentStorage[]
secrets Secret[]
teams Team[]
connectedDatabase ApplicationConnectedDatabase?
previewApplication PreviewApplication[]
gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
dockerRegistry DockerRegistry? @relation(fields: [dockerRegistryId], references: [id])
}
model PreviewApplication {
@@ -128,7 +154,7 @@ model PreviewApplication {
sourceBranch String
isRandomDomain Boolean @default(false)
customDomain String?
applicationId String @unique
applicationId String
application Application @relation(fields: [applicationId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -161,6 +187,9 @@ model ApplicationSettings {
isBot Boolean @default(false)
isPublicRepository Boolean @default(false)
isDBBranching Boolean @default(false)
isCustomSSL Boolean @default(false)
isHttp2 Boolean @default(false)
basicAuth Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
application Application @relation(fields: [applicationId], references: [id])
@@ -169,7 +198,9 @@ model ApplicationSettings {
model ApplicationPersistentStorage {
id String @id @default(cuid())
applicationId String
hostPath String?
path String
oldPath Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
application Application @relation(fields: [applicationId], references: [id])
@@ -178,14 +209,17 @@ model ApplicationPersistentStorage {
}
model ServicePersistentStorage {
id String @id @default(cuid())
serviceId String
path String
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
service Service @relation(fields: [serviceId], references: [id])
id String @id @default(cuid())
serviceId String
path String
volumeName String?
predefined Boolean @default(false)
containerId String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
service Service @relation(fields: [serviceId], references: [id])
@@unique([serviceId, path])
@@unique([serviceId, containerId, path])
}
model Secret {
@@ -235,6 +269,7 @@ model Build {
previewApplicationId String?
forceRebuild Boolean @default(false)
sourceBranch String?
sourceRepository String?
branch String?
status String? @default("queued")
createdAt DateTime @default(now())
@@ -274,6 +309,19 @@ model SshKey {
destinationDocker DestinationDocker[]
}
model DockerRegistry {
id String @id @default(cuid())
name String
url String
username String?
password String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
teamId String?
team Team? @relation(fields: [teamId], references: [id])
application Application[]
}
model GitSource {
id String @id @default(cuid())
name String
@@ -282,11 +330,13 @@ model GitSource {
apiUrl String?
htmlUrl String?
customPort Int @default(22)
customUser String @default("git")
organization String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
githubAppId String? @unique
gitlabAppId String? @unique
isSystemWide Boolean @default(false)
gitlabApp GitlabApp? @relation(fields: [gitlabAppId], references: [id])
githubApp GithubApp? @relation(fields: [githubAppId], references: [id])
application Application[]
@@ -375,12 +425,14 @@ model Service {
dualCerts Boolean @default(false)
type String?
version String?
templateVersion String @default("0.0.0")
destinationDockerId String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
persistentStorage ServicePersistentStorage[]
serviceSecret ServiceSecret[]
serviceSetting ServiceSetting[]
teams Team[]
fider Fider?
@@ -400,6 +452,19 @@ model Service {
taiga Taiga?
}
model ServiceSetting {
id String @id @default(cuid())
serviceId String
name String
value String
variableName String
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
service Service @relation(fields: [serviceId], references: [id])
@@unique([serviceId, name])
}
model PlausibleAnalytics {
id String @id @default(cuid())
email String?
@@ -445,10 +510,10 @@ model Wordpress {
ownMysql Boolean @default(false)
mysqlHost String?
mysqlPort Int?
mysqlUser String
mysqlPassword String
mysqlRootUser String
mysqlRootUserPassword String
mysqlUser String?
mysqlPassword String?
mysqlRootUser String?
mysqlRootUserPassword String?
mysqlDatabase String?
mysqlPublicPort Int?
ftpEnabled Boolean @default(false)
@@ -588,8 +653,8 @@ model GlitchTip {
emailSmtpPort Int? @default(25)
emailSmtpUser String?
emailSmtpPassword String?
emailSmtpUseTls Boolean? @default(false)
emailSmtpUseSsl Boolean? @default(false)
emailSmtpUseTls Boolean @default(false)
emailSmtpUseSsl Boolean @default(false)
emailBackend String?
mailgunApiKey String?
sendgridApiKey String?
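Taken together, the hunks above add a DockerRegistry model and link it to Application through dockerRegistryId and dockerRegistryImageName. A hypothetical Prisma Client sketch of wiring the two up (values are illustrative; in practice the registry password is stored encrypted, as the re-encryption pass in the seed below shows):

import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

// Create a registry record and point an existing application at it.
async function attachRegistry(applicationId: string) {
	const registry = await prisma.dockerRegistry.create({
		data: { name: 'ghcr', url: 'ghcr.io', username: 'me', password: 'token' }
	});
	await prisma.application.update({
		where: { id: applicationId },
		data: { dockerRegistryId: registry.id, dockerRegistryImageName: 'me/app:latest' }
	});
}

attachRegistry('application-cuid-here').finally(() => prisma.$disconnect());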

View File

@@ -1,18 +1,8 @@
const dotEnvExtended = require('dotenv-extended');
dotEnvExtended.load();
const crypto = require('crypto');
const generator = require('generate-password');
const cuid = require('cuid');
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();
function generatePassword(length = 24) {
return generator.generate({
length,
numbers: true,
strict: true
});
}
const algorithm = 'aes-256-ctr';
async function main() {
@@ -21,11 +11,8 @@ async function main() {
if (!settingsFound) {
await prisma.setting.create({
data: {
isRegistrationEnabled: true,
proxyPassword: encrypt(generatePassword()),
proxyUser: cuid(),
arch: process.arch,
DNSServers: '1.1.1.1,8.8.8.8'
id: '0',
arch: process.arch
}
});
} else {
@@ -34,11 +21,11 @@ async function main() {
id: settingsFound.id
},
data: {
isTraefikUsed: true,
proxyHash: null
id: '0'
}
});
}
// Create local docker engine
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine: '/var/run/docker.sock' }
});
@@ -55,23 +42,18 @@ async function main() {
// Set auto-update based on env variable
const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
const settings = await prisma.setting.findFirst({});
if (settings) {
await prisma.setting.update({
where: {
id: settings.id
},
data: {
isAutoUpdateEnabled
}
});
}
await prisma.setting.update({
where: {
id: '0'
},
data: {
isAutoUpdateEnabled
}
});
// Create public github source
const github = await prisma.gitSource.findFirst({
where: { htmlUrl: 'https://github.com', forPublic: true }
});
const gitlab = await prisma.gitSource.findFirst({
where: { htmlUrl: 'https://gitlab.com', forPublic: true }
});
if (!github) {
await prisma.gitSource.create({
data: {
@@ -83,6 +65,10 @@ async function main() {
}
});
}
// Create public gitlab source
const gitlab = await prisma.gitSource.findFirst({
where: { htmlUrl: 'https://gitlab.com', forPublic: true }
});
if (!gitlab) {
await prisma.gitSource.create({
data: {
@@ -94,7 +80,296 @@ async function main() {
}
});
}
// Set new preview secrets
const secrets = await prisma.secret.findMany({ where: { isPRMRSecret: false } });
if (secrets.length > 0) {
for (const secret of secrets) {
const previewSecrets = await prisma.secret.findMany({
where: { applicationId: secret.applicationId, name: secret.name, isPRMRSecret: true }
});
if (previewSecrets.length === 0) {
await prisma.secret.create({ data: { ...secret, id: undefined, isPRMRSecret: true } });
}
}
}
}
async function reEncryptSecrets() {
const { execaCommand } = await import('execa');
const image = await execaCommand("docker inspect coolify --format '{{ .Config.Image }}'", {
shell: true
});
const version = image.stdout.split(':')[1] ?? null;
const date = new Date().getTime();
let backupfile = `/app/db/prod.db_${date}`;
if (version) {
backupfile = `/app/db/prod.db_${version}_${date}`;
}
await execaCommand('env | grep "^COOLIFY" | sort > .env', {
shell: true
});
const secretOld = process.env['COOLIFY_SECRET_KEY'];
let secretNew = process.env['COOLIFY_SECRET_KEY_BETTER'];
if (!secretNew) {
console.log('No COOLIFY_SECRET_KEY_BETTER found... Generating new one...');
const { stdout: newKey } = await execaCommand(
'openssl rand -base64 1024 | sha256sum | base64 | head -c 32',
{ shell: true }
);
secretNew = newKey;
}
if (secretOld !== secretNew) {
console.log(`Backup database to ${backupfile}.`);
await execaCommand(`cp /app/db/prod.db ${backupfile}`, { shell: true });
console.log(
'Secrets (COOLIFY_SECRET_KEY & COOLIFY_SECRET_KEY_BETTER) are different, so re-encrypting everything...'
);
await execaCommand(`sed -i '/COOLIFY_SECRET_KEY=/d' .env`, { shell: true });
await execaCommand(`sed -i '/COOLIFY_SECRET_KEY_BETTER=/d' .env`, { shell: true });
await execaCommand(`echo "COOLIFY_SECRET_KEY=${secretNew}" >> .env`, { shell: true });
await execaCommand('echo "COOLIFY_SECRET_KEY_BETTER=' + secretNew + '" >> .env ', {
shell: true
});
await execaCommand(`echo "COOLIFY_SECRET_KEY_OLD_${date}=${secretOld}" >> .env`, {
shell: true
});
const transactions = [];
const secrets = await prisma.secret.findMany();
if (secrets.length > 0) {
for (const secret of secrets) {
try {
const value = decrypt(secret.value, secretOld);
const newValue = encrypt(value, secretNew);
transactions.push(
prisma.secret.update({
where: { id: secret.id },
data: { value: newValue }
})
);
} catch (e) {
console.log(e);
}
}
}
const serviceSecrets = await prisma.serviceSecret.findMany();
if (serviceSecrets.length > 0) {
for (const secret of serviceSecrets) {
try {
const value = decrypt(secret.value, secretOld);
const newValue = encrypt(value, secretNew);
transactions.push(
prisma.serviceSecret.update({
where: { id: secret.id },
data: { value: newValue }
})
);
} catch (e) {
console.log(e);
}
}
}
const gitlabApps = await prisma.gitlabApp.findMany();
if (gitlabApps.length > 0) {
for (const gitlabApp of gitlabApps) {
try {
const value = decrypt(gitlabApp.privateSshKey, secretOld);
const newValue = encrypt(value, secretNew);
const appSecret = decrypt(gitlabApp.appSecret, secretOld);
const newAppSecret = encrypt(appSecret, secretNew);
transactions.push(
prisma.gitlabApp.update({
where: { id: gitlabApp.id },
data: { privateSshKey: newValue, appSecret: newAppSecret }
})
);
} catch (e) {
console.log(e);
}
}
}
const githubApps = await prisma.githubApp.findMany();
if (githubApps.length > 0) {
for (const githubApp of githubApps) {
try {
const clientSecret = decrypt(githubApp.clientSecret, secretOld);
const newClientSecret = encrypt(clientSecret, secretNew);
const webhookSecret = decrypt(githubApp.webhookSecret, secretOld);
const newWebhookSecret = encrypt(webhookSecret, secretNew);
const privateKey = decrypt(githubApp.privateKey, secretOld);
const newPrivateKey = encrypt(privateKey, secretNew);
transactions.push(
prisma.githubApp.update({
where: { id: githubApp.id },
data: {
clientSecret: newClientSecret,
webhookSecret: newWebhookSecret,
privateKey: newPrivateKey
}
})
);
} catch (e) {
console.log(e);
}
}
}
const databases = await prisma.database.findMany();
if (databases.length > 0) {
for (const database of databases) {
try {
const dbUserPassword = decrypt(database.dbUserPassword, secretOld);
const newDbUserPassword = encrypt(dbUserPassword, secretNew);
const rootUserPassword = decrypt(database.rootUserPassword, secretOld);
const newRootUserPassword = encrypt(rootUserPassword, secretNew);
transactions.push(
prisma.database.update({
where: { id: database.id },
data: {
dbUserPassword: newDbUserPassword,
rootUserPassword: newRootUserPassword
}
})
);
} catch (e) {
console.log(e);
}
}
}
const databaseSecrets = await prisma.databaseSecret.findMany();
if (databaseSecrets.length > 0) {
for (const databaseSecret of databaseSecrets) {
try {
const value = decrypt(databaseSecret.value, secretOld);
const newValue = encrypt(value, secretNew);
transactions.push(
prisma.databaseSecret.update({
where: { id: databaseSecret.id },
data: { value: newValue }
})
);
} catch (e) {
console.log(e);
}
}
}
const wordpresses = await prisma.wordpress.findMany();
if (wordpresses.length > 0) {
for (const wordpress of wordpresses) {
try {
const value = decrypt(wordpress.ftpHostKey, secretOld);
const newValue = encrypt(value, secretNew);
const ftpHostKeyPrivate = decrypt(wordpress.ftpHostKeyPrivate, secretOld);
const newFtpHostKeyPrivate = encrypt(ftpHostKeyPrivate, secretNew);
let newFtpPassword = undefined;
if (wordpress.ftpPassword != null) {
const ftpPassword = decrypt(wordpress.ftpPassword, secretOld);
newFtpPassword = encrypt(ftpPassword, secretNew);
}
transactions.push(
prisma.wordpress.update({
where: { id: wordpress.id },
data: {
ftpHostKey: newValue,
ftpHostKeyPrivate: newFtpHostKeyPrivate,
ftpPassword: newFtpPassword
}
})
);
} catch (e) {
console.log(e);
}
}
}
const sshKeys = await prisma.sshKey.findMany();
if (sshKeys.length > 0) {
for (const key of sshKeys) {
try {
const value = decrypt(key.privateKey, secretOld);
const newValue = encrypt(value, secretNew);
transactions.push(
prisma.sshKey.update({
where: { id: key.id },
data: {
privateKey: newValue
}
})
);
} catch (e) {
console.log(e);
}
}
}
const dockerRegistries = await prisma.dockerRegistry.findMany();
if (dockerRegistries.length > 0) {
for (const registry of dockerRegistries) {
try {
const value = decrypt(registry.password, secretOld);
const newValue = encrypt(value, secretNew);
transactions.push(
prisma.dockerRegistry.update({
where: { id: registry.id },
data: {
password: newValue
}
})
);
} catch (e) {
console.log(e);
}
}
}
const certificates = await prisma.certificate.findMany();
if (certificates.length > 0) {
for (const certificate of certificates) {
try {
const value = decrypt(certificate.key, secretOld);
const newValue = encrypt(value, secretNew);
transactions.push(
prisma.certificate.update({
where: { id: certificate.id },
data: {
key: newValue
}
})
);
} catch (e) {
console.log(e);
}
}
}
await prisma.$transaction(transactions);
} else {
console.log('secrets are the same, so no need to re-encrypt');
}
}
const encrypt = (text, secret) => {
if (text && secret) {
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, secret, iv);
const encrypted = Buffer.concat([cipher.update(text.trim()), cipher.final()]);
return JSON.stringify({
iv: iv.toString('hex'),
content: encrypted.toString('hex')
});
}
};
const decrypt = (hashString, secret) => {
if (hashString && secret) {
const hash = JSON.parse(hashString);
const decipher = crypto.createDecipheriv(algorithm, secret, Buffer.from(hash.iv, 'hex'));
const decrpyted = Buffer.concat([
decipher.update(Buffer.from(hash.content, 'hex')),
decipher.final()
]);
if (/\uFFFD/.test(decrpyted.toString())) {
throw new Error('Invalid secret. Skipping...');
}
return decrpyted.toString();
}
};
main()
.catch((e) => {
console.error(e);
@@ -103,15 +378,11 @@ main()
.finally(async () => {
await prisma.$disconnect();
});
const encrypt = (text) => {
if (text) {
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, process.env['COOLIFY_SECRET_KEY'], iv);
const encrypted = Buffer.concat([cipher.update(text), cipher.final()]);
return JSON.stringify({
iv: iv.toString('hex'),
content: encrypted.toString('hex')
});
}
};
reEncryptSecrets()
.catch((e) => {
console.error(e);
process.exit(1);
})
.finally(async () => {
await prisma.$disconnect();
});
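As a standalone illustration of the scheme the seed uses (aes-256-ctr with a random 16-byte IV and a 32-character key, matching the `openssl ... | head -c 32` generation above), a hypothetical round trip looks like this; it is not part of the seed itself:

import crypto from 'crypto';

const algorithm = 'aes-256-ctr';
const secret = 'a'.repeat(32); // placeholder; real installs use the generated COOLIFY_SECRET_KEY

// Encrypt: random IV plus hex-encoded ciphertext, serialized the same way as above.
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, secret, iv);
const payload = {
	iv: iv.toString('hex'),
	content: Buffer.concat([cipher.update('hello'), cipher.final()]).toString('hex')
};

// Decrypt with the same key and IV to recover the plaintext.
const decipher = crypto.createDecipheriv(algorithm, secret, Buffer.from(payload.iv, 'hex'));
const roundTrip = Buffer.concat([
	decipher.update(Buffer.from(payload.content, 'hex')),
	decipher.final()
]).toString(); // 'hello'
console.log(roundTrip);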

View File

@@ -0,0 +1,67 @@
import fs from 'fs/promises';
import yaml from 'js-yaml';
import got from 'got';
const repositories = [];
const templates = await fs.readFile('./apps/api/devTemplates.yaml', 'utf8');
const devTemplates = yaml.load(templates);
for (const template of devTemplates) {
let image = template.services['$$id'].image.replaceAll(':$$core_version', '');
if (!image.includes('/')) {
image = `library/${image}`;
}
repositories.push({ image, name: template.type });
}
const services = []
const numberOfTags = 30;
// const semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/g)
for (const repository of repositories) {
console.log('Querying', repository.name, 'at', repository.image);
let semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)$/g)
if (repository.name.startsWith('wordpress')) {
semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)-php(0|[1-9]\d*)$/g)
}
if (repository.name.startsWith('minio')) {
semverRegex = new RegExp(/^RELEASE.*$/g)
}
if (repository.name.startsWith('fider')) {
semverRegex = new RegExp(/^v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)-([0-9]+)$/g)
}
if (repository.name.startsWith('searxng')) {
semverRegex = new RegExp(/^\d{4}[\.\-](0?[1-9]|[12][0-9]|3[01])[\.\-](0?[1-9]|1[012]).*$/)
}
if (repository.name.startsWith('umami')) {
semverRegex = new RegExp(/^postgresql-v?(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)-([0-9]+)$/g)
}
if (repository.image.includes('ghcr.io')) {
const { execaCommand } = await import('execa');
const { stdout } = await execaCommand(`docker run --rm quay.io/skopeo/stable list-tags docker://${repository.image}`);
if (stdout) {
const json = JSON.parse(stdout);
const semverTags = json.Tags.filter((tag) => semverRegex.test(tag))
let tags = semverTags.length > 10 ? semverTags.sort().reverse().slice(0, numberOfTags) : json.Tags.sort().reverse().slice(0, numberOfTags)
if (!tags.includes('latest')) {
tags.push('latest')
}
services.push({ name: repository.name, image: repository.image, tags })
}
} else {
const { token } = await got.get(`https://auth.docker.io/token?service=registry.docker.io&scope=repository:${repository.image}:pull`).json()
let data = await got.get(`https://registry-1.docker.io/v2/${repository.image}/tags/list`, {
headers: {
Authorization: `Bearer ${token}`
}
}).json()
const semverTags = data.tags.filter((tag) => semverRegex.test(tag))
let tags = semverTags.length > 10 ? semverTags.sort().reverse().slice(0, numberOfTags) : data.tags.sort().reverse().slice(0, numberOfTags)
if (!tags.includes('latest')) {
tags.push('latest')
}
services.push({
name: repository.name,
image: repository.image,
tags
})
}
}
await fs.writeFile('./apps/api/devTags.json', JSON.stringify(services));
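One behaviour worth keeping in mind when reading the tag filtering above: a RegExp created with the g flag keeps its lastIndex between .test() calls, so reusing a single instance inside .filter() can silently skip tags. A minimal illustration (not Coolify code):

const re = /^v?\d+\.\d+\.\d+$/g;
// Stateful: the first match advances lastIndex, so the next test starts mid-string and fails.
['1.2.3', '1.2.4'].filter((t) => re.test(t)); // -> ['1.2.3']
// Stateless alternative: omit the g flag (or build a fresh RegExp per call).
['1.2.3', '1.2.4'].filter((t) => /^v?\d+\.\d+\.\d+$/.test(t)); // -> ['1.2.3', '1.2.4']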

View File

@@ -1,54 +1,79 @@
import Fastify from 'fastify';
import cors from '@fastify/cors';
import serve from '@fastify/static';
import env from '@fastify/env';
import cookie from '@fastify/cookie';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import { asyncExecShell, createRemoteEngineConfiguration, getDomain, isDev, listSettings, prisma, version } from './lib/common';
import { scheduler } from './lib/scheduler';
import cookie from '@fastify/cookie';
import cors from '@fastify/cors';
import env from '@fastify/env';
import multipart from '@fastify/multipart';
import serve from '@fastify/static';
import Fastify from 'fastify';
import socketIO from 'fastify-socket.io';
import path, { join } from 'path';
import socketIOServer from './realtime';
import Graceful from '@ladjs/graceful';
import { compareVersions } from 'compare-versions';
import Graceful from '@ladjs/graceful'
import fs from 'fs/promises';
import yaml from 'js-yaml';
import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib';
import {
cleanupDockerStorage,
createRemoteEngineConfiguration,
decrypt,
executeCommand,
generateDatabaseConfiguration,
isDev,
listSettings,
prisma,
startTraefikProxy,
startTraefikTCPProxy,
version
} from './lib/common';
import { checkContainer } from './lib/docker';
import { scheduler } from './lib/scheduler';
import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers';
import { refreshTags, refreshTemplates } from './routes/api/v1/handlers';
declare module 'fastify' {
interface FastifyInstance {
config: {
COOLIFY_APP_ID: string,
COOLIFY_SECRET_KEY: string,
COOLIFY_DATABASE_URL: string,
COOLIFY_SENTRY_DSN: string,
COOLIFY_IS_ON: string,
COOLIFY_WHITE_LABELED: string,
COOLIFY_WHITE_LABELED_ICON: string | null,
COOLIFY_AUTO_UPDATE: string,
COOLIFY_APP_ID: string;
COOLIFY_SECRET_KEY: string;
COOLIFY_SECRET_KEY_BETTER: string | null;
COOLIFY_DATABASE_URL: string;
COOLIFY_IS_ON: string;
COOLIFY_WHITE_LABELED: string;
COOLIFY_WHITE_LABELED_ICON: string | null;
COOLIFY_AUTO_UPDATE: string;
};
}
}
const port = isDev ? 3001 : 3000;
const host = '0.0.0.0';
prisma.setting.findFirst().then(async (settings) => {
(async () => {
const settings = await prisma.setting.findFirst();
const fastify = Fastify({
logger: settings?.isAPIDebuggingEnabled || false,
trustProxy: true
});
const schema = {
type: 'object',
required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
properties: {
COOLIFY_APP_ID: {
type: 'string',
type: 'string'
},
COOLIFY_SECRET_KEY: {
type: 'string'
},
COOLIFY_SECRET_KEY_BETTER: {
type: 'string',
default: null
},
COOLIFY_DATABASE_URL: {
type: 'string',
default: 'file:../db/dev.db'
},
COOLIFY_SENTRY_DSN: {
type: 'string',
default: null
},
COOLIFY_IS_ON: {
type: 'string',
default: 'docker'
@@ -64,11 +89,9 @@ prisma.setting.findFirst().then(async (settings) => {
COOLIFY_AUTO_UPDATE: {
type: 'string',
default: 'false'
},
}
}
};
const options = {
schema,
dotenv: true
@@ -88,39 +111,48 @@ prisma.setting.findFirst().then(async (settings) => {
return reply.status(200).sendFile('index.html');
});
}
fastify.register(multipart, { limits: { fileSize: 100000 } });
fastify.register(autoLoad, {
dir: join(__dirname, 'plugins')
});
fastify.register(autoLoad, {
dir: join(__dirname, 'routes')
});
fastify.register(cookie)
fastify.register(cookie);
fastify.register(cors);
fastify.addHook('onRequest', async (request, reply) => {
let allowedList = ['coolify:3000'];
const { ipv4, ipv6, fqdn } = await prisma.setting.findFirst({})
fastify.register(socketIO, {
cors: {
origin: isDev ? '*' : ''
}
});
// To detect allowed origins
// fastify.addHook('onRequest', async (request, reply) => {
// console.log(request.headers.host)
// let allowedList = ['coolify:3000'];
// const { ipv4, ipv6, fqdn } = await prisma.setting.findFirst({})
ipv4 && allowedList.push(`${ipv4}:3000`);
ipv6 && allowedList.push(ipv6);
fqdn && allowedList.push(getDomain(fqdn));
isDev && allowedList.push('localhost:3000') && allowedList.push('localhost:3001') && allowedList.push('host.docker.internal:3001');
const remotes = await prisma.destinationDocker.findMany({ where: { remoteEngine: true, remoteVerified: true } })
if (remotes.length > 0) {
remotes.forEach(remote => {
allowedList.push(`${remote.remoteIpAddress}:3000`);
})
}
if (!allowedList.includes(request.headers.host)) {
// console.log('not allowed', request.headers.host)
}
})
fastify.listen({ port, host }, async (err: any, address: any) => {
if (err) {
console.error(err);
process.exit(1);
}
// ipv4 && allowedList.push(`${ipv4}:3000`);
// ipv6 && allowedList.push(ipv6);
// fqdn && allowedList.push(getDomain(fqdn));
// isDev && allowedList.push('localhost:3000') && allowedList.push('localhost:3001') && allowedList.push('host.docker.internal:3001');
// const remotes = await prisma.destinationDocker.findMany({ where: { remoteEngine: true, remoteVerified: true } })
// if (remotes.length > 0) {
// remotes.forEach(remote => {
// allowedList.push(`${remote.remoteIpAddress}:3000`);
// })
// }
// if (!allowedList.includes(request.headers.host)) {
// // console.log('not allowed', request.headers.host)
// }
// })
try {
await fastify.listen({ port, host });
await socketIOServer(fastify);
console.log(`Coolify's API is listening on ${host}:${port}`);
migrateServicesToNewTemplate();
await migrateApplicationPersistentStorage();
await initServer();
const graceful = new Graceful({ brees: [scheduler] });
@@ -130,84 +162,509 @@ prisma.setting.findFirst().then(async (settings) => {
if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication');
}
if (!scheduler.workers.has('infrastructure')) {
scheduler.run('infrastructure');
}
}, 2000)
}, 2000);
// autoUpdater
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:autoUpdater")
}, isDev ? 5000 : 60000 * 15)
await autoUpdater();
}, 60000 * 60);
// cleanupStorage
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupStorage")
}, isDev ? 6000 : 60000 * 10)
await cleanupStorage();
}, 60000 * 15);
// checkProxies
// Clean up stuck containers (not defined in Coolify, but still running and managed by Coolify)
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkProxies")
}, 10000)
await cleanupStuckedContainers();
}, 60000);
// cleanupPrismaEngines
// setInterval(async () => {
// scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupPrismaEngines")
// }, 60000)
// checkProxies, checkFluentBit & refresh templates
setInterval(async () => {
await checkProxies();
await checkFluentBit();
}, 60000);
// Refresh and check templates
setInterval(async () => {
await refreshTemplates();
}, 60000 * 10);
setInterval(async () => {
await refreshTags();
}, 60000 * 10);
setInterval(
async () => {
await migrateServicesToNewTemplate();
},
isDev ? 10000 : 60000 * 10
);
setInterval(async () => {
await copySSLCertificates();
}, 10000);
await Promise.all([
getTagsTemplates(),
getArch(),
getIPAddress(),
configureRemoteDockers(),
])
});
})
refreshTemplates(),
refreshTags()
// cleanupStuckedContainers()
]);
} catch (error) {
console.error(error);
process.exit(1);
}
})();
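// Looks up the instance's public IPv4/IPv6 addresses via the public-ip package and stores them in settings if they are not set yet.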
async function getIPAddress() {
const { publicIpv4, publicIpv6 } = await import('public-ip')
const { publicIpv4, publicIpv6 } = await import('public-ip');
try {
const settings = await listSettings();
if (!settings.ipv4) {
const ipv4 = await publicIpv4({ timeout: 2000 })
await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
const ipv4 = await publicIpv4({ timeout: 2000 });
console.log(`Getting public IPv4 address...`);
await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } });
}
if (!settings.ipv6) {
const ipv6 = await publicIpv6({ timeout: 2000 })
await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
const ipv6 = await publicIpv6({ timeout: 2000 });
console.log(`Getting public IPv6 address...`);
await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } });
}
} catch (error) { }
}
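// Loads service templates and tags: from local dev files (devTemplates.yaml / devTags.json, plus optional test overrides) in dev mode, otherwise from get.coollabs.io.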
async function getTagsTemplates() {
const { default: got } = await import('got');
try {
if (isDev) {
let templates = await fs.readFile('./devTemplates.yaml', 'utf8');
let tags = await fs.readFile('./devTags.json', 'utf8');
try {
if (await fs.stat('./testTemplate.yaml')) {
templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8'));
}
} catch (error) { }
try {
if (await fs.stat('./testTags.json')) {
const testTags = await fs.readFile('./testTags.json', 'utf8');
if (testTags.length > 0) {
tags = JSON.stringify(JSON.parse(tags).concat(JSON.parse(testTags)));
}
}
} catch (error) { }
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)));
await fs.writeFile('./tags.json', tags);
console.log('[004] Tags and templates loaded in dev mode...');
} else {
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text();
const response = await got
.get('https://get.coollabs.io/coolify/service-templates.yaml')
.text();
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)));
await fs.writeFile('/app/tags.json', tags);
console.log('[004] Tags and templates loaded...');
}
} catch (error) {
console.log("Couldn't get latest templates.");
console.log(error);
}
}
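// One-time startup tasks: ensure the 'coolify' Docker network exists, mark stale running/queued builds as failed where needed, and clear /tmp/build-sources.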
async function initServer() {
const appId = process.env['COOLIFY_APP_ID'];
const settings = await prisma.setting.findUnique({ where: { id: '0' } });
try {
await asyncExecShell(`docker network create --attachable coolify`);
if (settings.doNotTrack === true) {
console.log('[000] Telemetry disabled...');
} else {
// Initialize Sentry
// Sentry.init({
// dsn: sentryDSN,
// environment: isDev ? 'development' : 'production',
// release: version
// });
// console.log('[000] Sentry initialized...')
}
} catch (error) {
console.error(error);
}
try {
console.log(`[001] Initializing server...`);
await executeCommand({ command: `docker network create --attachable coolify` });
} catch (error) { }
try {
console.log(`[002] Cleaning up stuck builds...`);
const isOlder = compareVersions('3.8.1', version);
if (isOlder === 1) {
await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
await prisma.build.updateMany({
where: { status: { in: ['running', 'queued'] } },
data: { status: 'failed' }
});
}
} catch (error) { }
try {
console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
if (!isDev) await fs.rm('/tmp/build-sources', { recursive: true, force: true });
} catch (error) {
console.log(error);
}
}
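// Records the host CPU architecture (process.arch) in settings if it is not stored yet.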
async function getArch() {
try {
const settings = await prisma.setting.findFirst({})
const settings = await prisma.setting.findFirst({});
if (settings && !settings.arch) {
await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } })
console.log(`Getting architecture...`);
await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } });
}
} catch (error) { }
}
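// Removes containers labelled coolify.managed=true that no longer belong to any known application, service or database.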
async function cleanupStuckedContainers() {
try {
const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set();
for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress))
return;
if (destination.engine) {
enginesDone.add(destination.engine);
}
if (destination.remoteIpAddress) {
if (!destination.remoteVerified) continue;
enginesDone.add(destination.remoteIpAddress);
}
const { stdout: containers } = await executeCommand({
dockerId: destination.id,
command: `docker container ps -a --filter "label=coolify.managed=true" --format '{{ .Names}}'`
});
if (containers) {
const containersArray = containers.trim().split('\n');
if (containersArray.length > 0) {
for (const container of containersArray) {
const containerId = container.split('-')[0];
const application = await prisma.application.findFirst({
where: { id: { startsWith: containerId } }
});
const service = await prisma.service.findFirst({
where: { id: { startsWith: containerId } }
});
const database = await prisma.database.findFirst({
where: { id: { startsWith: containerId } }
});
if (!application && !service && !database) {
await executeCommand({ command: `docker container rm -f ${container}` });
}
}
}
}
}
} catch (error) {
console.log(error);
}
}
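// Recreates the SSH engine configuration for every verified remote Docker destination and re-verifies it.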
async function configureRemoteDockers() {
try {
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteVerified: true, remoteEngine: true }
});
if (remoteDocker.length > 0) {
console.log(`Verifying Remote Docker Engines...`);
for (const docker of remoteDocker) {
await createRemoteEngineConfiguration(docker.id)
console.log('Verifying:', docker.remoteIpAddress);
await verifyRemoteDockerEngineFn(docker.id);
}
}
} catch (error) {
console.log(error);
}
}
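// Checks get.coollabs.io/versions.json for a newer release and, if auto-update is enabled (and not in dev), pulls the new image and restarts Coolify through a helper container.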
async function autoUpdater() {
try {
const { default: got } = await import('got');
const currentVersion = version;
const { coolify } = await got
.get('https://get.coollabs.io/versions.json', {
searchParams: {
appId: process.env['COOLIFY_APP_ID'] || undefined,
version: currentVersion
}
})
.json();
const latestVersion = coolify.main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
const activeCount = 0;
if (activeCount === 0) {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
let image = `ghcr.io/coollabsio/coolify:${latestVersion}`;
try {
await executeCommand({ command: `docker pull ${image}` });
} catch (error) {
image = `coollabsio/coolify:${latestVersion}`;
await executeCommand({ command: `docker pull ${image}` });
}
await executeCommand({ shell: true, command: `ls .env || env | grep "^COOLIFY" | sort > .env` });
await executeCommand({
command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db ${image} /bin/sh -c "env | grep "^COOLIFY" | sort > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
}
} else {
console.log('Updating (not really in dev mode).');
}
}
}
} catch (error) {
console.log(error);
}
}
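// Ensures the coolify-fluentbit logging container is running in production; brings it back up via docker compose if it is missing.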
async function checkFluentBit() {
try {
if (!isDev) {
const engine = '/var/run/docker.sock';
const { id } = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify' }
});
const { found } = await checkContainer({
dockerId: id,
container: 'coolify-fluentbit',
remove: true
});
if (!found) {
await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
await executeCommand({ command: `docker compose up -d fluent-bit` });
}
}
} catch (error) {
console.log(error);
}
}
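// Verifies that the Traefik proxy is reachable locally and on remote engines, and (re)starts the TCP proxies for public databases and WordPress FTP.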
async function checkProxies() {
try {
const { default: isReachable } = await import('is-port-reachable');
let portReachable;
const { arch, ipv4, ipv6 } = await listSettings();
// Coolify Proxy local
const engine = '/var/run/docker.sock';
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
});
if (localDocker) {
portReachable = await isReachable(80, { host: ipv4 || ipv6 });
if (!portReachable) {
await startTraefikProxy(localDocker.id);
}
}
// Coolify Proxy remote
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteEngine: true, remoteVerified: true }
});
if (remoteDocker.length > 0) {
for (const docker of remoteDocker) {
if (docker.isCoolifyProxyUsed) {
portReachable = await isReachable(80, { host: docker.remoteIpAddress });
if (!portReachable) {
await startTraefikProxy(docker.id);
}
}
try {
await createRemoteEngineConfiguration(docker.id);
} catch (error) { }
}
}
// TCP Proxies
const databasesWithPublicPort = await prisma.database.findMany({
where: { publicPort: { not: null } },
include: { settings: true, destinationDocker: true }
});
for (const database of databasesWithPublicPort) {
const { destinationDockerId, destinationDocker, publicPort, id } = database;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
const { privatePort } = generateDatabaseConfiguration(database, arch);
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
}
}
const wordpressWithFtp = await prisma.wordpress.findMany({
where: { ftpPublicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const ftp of wordpressWithFtp) {
const { service, ftpPublicPort } = ftp;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
}
}
// HTTP Proxies
// const minioInstances = await prisma.minio.findMany({
// where: { publicPort: { not: null } },
// include: { service: { include: { destinationDocker: true } } }
// });
// for (const minio of minioInstances) {
// const { service, publicPort } = minio;
// const { destinationDockerId, destinationDocker, id } = service;
// if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
// await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
// }
// }
} catch (error) { }
}
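// Writes every custom certificate to /tmp and copies it into the coolify-proxy container, locally or over SSH for remote engines; temporary files are removed afterwards.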
async function copySSLCertificates() {
try {
const pAll = await import('p-all');
const actions = [];
const certificates = await prisma.certificate.findMany({ include: { team: true } });
const teamIds = certificates.map((c) => c.teamId);
const destinations = await prisma.destinationDocker.findMany({
where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } }
});
for (const certificate of certificates) {
const { id, key, cert } = certificate;
const decryptedKey = decrypt(key);
await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey);
await fs.writeFile(`/tmp/${id}-cert.pem`, cert);
for (const destination of destinations) {
if (destination.remoteEngine) {
if (destination.remoteVerified) {
const { id: dockerId, remoteIpAddress } = destination;
actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress));
}
} else {
actions.push(async () => copyLocalCertificates(id));
}
}
}
await pAll.default(actions, { concurrency: 1 });
} catch (error) {
console.log(error);
} finally {
try {
await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` });
} catch (e) {
console.log(e);
}
}
}
async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
try {
await executeCommand({
command: `scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`
});
await executeCommand({
sshCommand: true,
shell: true,
dockerId,
command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`
});
await executeCommand({
sshCommand: true,
dockerId,
command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`
});
await executeCommand({
sshCommand: true,
dockerId,
command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`
});
} catch (error) {
console.log({ error });
}
}
async function copyLocalCertificates(id: string) {
try {
await executeCommand({
command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`,
shell: true
});
await executeCommand({
command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`
});
await executeCommand({
command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`
});
} catch (error) {
console.log({ error });
}
}
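// Runs Docker storage cleanup once per engine / remote host; the previous low-disk-space check is kept below, commented out.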
async function cleanupStorage() {
const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set();
for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return;
if (destination.engine) {
enginesDone.add(destination.engine);
}
if (destination.remoteIpAddress) {
if (!destination.remoteVerified) continue;
enginesDone.add(destination.remoteIpAddress);
}
await cleanupDockerStorage(destination.id);
// let lowDiskSpace = false;
// try {
// let stdout = null;
// if (!isDev) {
// const output = await executeCommand({
// dockerId: destination.id,
// command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`,
// shell: true
// });
// stdout = output.stdout;
// } else {
// const output = await executeCommand({
// command: `df -kPT /`
// });
// stdout = output.stdout;
// }
// let lines = stdout.trim().split('\n');
// let header = lines[0];
// let regex =
// /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
// const boundaries = [];
// let match;
// while ((match = regex.exec(header))) {
// boundaries.push(match[0].length);
// }
// boundaries[boundaries.length - 1] = -1;
// const data = lines.slice(1).map((line) => {
// const cl = boundaries.map((boundary) => {
// const column = boundary > 0 ? line.slice(0, boundary) : line;
// line = line.slice(boundary);
// return column.trim();
// });
// return {
// capacity: Number.parseInt(cl[5], 10) / 100
// };
// });
// if (data.length > 0) {
// const { capacity } = data[0];
// if (capacity > 0.8) {
// lowDiskSpace = true;
// }
// }
// } catch (error) {}
// if (lowDiskSpace) {
// await cleanupDockerStorage(destination.id);
// }
}
}

View File

@@ -3,8 +3,26 @@ import crypto from 'crypto';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
import { createDirectories, decrypt, defaultComposeConfiguration, executeDockerCmd, getDomain, prisma, decryptApplication } from '../lib/common';
import {
copyBaseConfigurationFiles,
makeLabelForSimpleDockerfile,
makeLabelForStandaloneApplication,
saveBuildLog,
saveDockerRegistryCredentials,
setDefaultConfiguration
} from '../lib/buildPacks/common';
import {
createDirectories,
decrypt,
defaultComposeConfiguration,
getDomain,
prisma,
decryptApplication,
isDev,
pushToRegistry,
executeCommand,
generateSecrets
} from '../lib/common';
import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks';
@@ -14,79 +32,338 @@ import * as buildpacks from '../lib/buildPacks';
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
await prisma.$disconnect()
await prisma.$disconnect();
process.exit(0);
}
});
const pThrottle = await import('p-throttle')
const pThrottle = await import('p-throttle');
const throttle = pThrottle.default({
limit: 1,
interval: 2000
});
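// Polls for queued/running builds and deploys them; throttled to one pass every two seconds, with per-pass concurrency taken from the concurrentBuilds setting.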
const th = throttle(async () => {
try {
const queuedBuilds = await prisma.build.findMany({ where: { status: { in: ['queued', 'running'] } }, orderBy: { createdAt: 'asc' } });
const { concurrentBuilds } = await prisma.setting.findFirst({})
const queuedBuilds = await prisma.build.findMany({
where: { status: { in: ['queued', 'running'] } },
orderBy: { createdAt: 'asc' }
});
const { concurrentBuilds } = await prisma.setting.findFirst({});
if (queuedBuilds.length > 0) {
parentPort.postMessage({ deploying: true });
const concurrency = concurrentBuilds;
const pAll = await import('p-all');
const actions = []
const actions = [];
for (const queueBuild of queuedBuilds) {
actions.push(async () => {
let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild } = queueBuild
application = decryptApplication(application)
const originalApplicationId = application.id
if (pullmergeRequestId) {
const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
if (previewApplications.length > 0) {
previewApplicationId = previewApplications[0].id
let application = await prisma.application.findUnique({
where: { id: queueBuild.applicationId },
include: {
dockerRegistry: true,
destinationDocker: true,
gitSource: { include: { githubApp: true, gitlabApp: true } },
persistentStorage: true,
secrets: true,
settings: true,
teams: true
}
});
if (!application) {
await prisma.build.update({
where: { id: queueBuild.id },
data: {
status: 'failed'
}
});
throw new Error('Application not found');
}
const usableApplicationId = previewApplicationId || originalApplicationId
try {
if (queueBuild.status === 'running') {
await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
}
let {
id: buildId,
type,
gitSourceId,
sourceBranch = null,
pullmergeRequestId = null,
previewApplicationId = null,
forceRebuild,
sourceRepository = null
} = queueBuild;
application = decryptApplication(application);
if (!gitSourceId && application.simpleDockerfile) {
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
configHash,
fqdn,
projectId,
secrets,
phpModules,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
} = application
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = application
persistentStorage,
exposePort,
simpleDockerfile,
dockerRegistry
} = application;
const { workdir } = await createDirectories({ repository: applicationId, buildId });
try {
if (queueBuild.status === 'running') {
await saveBuildLog({
line: 'Building halted, restarting...',
buildId,
applicationId: application.id
});
}
const volumes =
persistentStorage?.map((storage) => {
if (storage.oldPath) {
return `${applicationId}${storage.path
.replace(/\//gi, '-')
.replace('-app', '')}:${storage.path}`;
}
if (storage.hostPath) {
return `${storage.hostPath}:${storage.path}`;
}
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || [];
if (destinationDockerId) {
await prisma.build.update({
where: { id: buildId },
data: { status: 'running' }
});
try {
const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}`
});
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({
dockerId: destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
} catch (error) {
//
}
let envs = [];
if (secrets.length > 0) {
envs = [
...envs,
...generateSecrets(secrets, pullmergeRequestId, false, port)
];
}
await fs.writeFile(`${workdir}/Dockerfile`, simpleDockerfile);
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
await saveDockerRegistryCredentials({ url, username, password, workdir });
}
const labels = makeLabelForSimpleDockerfile({
applicationId,
type,
port: exposePort ? `${exposePort}:${port}` : port
});
try {
const composeVolumes = volumes
.filter((v) => {
if (
!v.startsWith('.') &&
!v.startsWith('..') &&
!v.startsWith('/') &&
!v.startsWith('~')
) {
return v;
}
})
.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[applicationId]: {
build: {
context: workdir
},
image: `${applicationId}:${buildId}`,
container_name: applicationId,
volumes,
labels,
environment: envs,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(destinationDocker.network)
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeCommand({
debug: true,
dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
});
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
}
} catch (error) {
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
if (error instanceof Error) {
await saveBuildLog({
line: error.message,
buildId,
applicationId: application.id
});
}
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
if (application.dockerRegistryImageName) {
const customTag = application.dockerRegistryImageName.split(':')[1] || buildId;
const imageName = application.dockerRegistryImageName.split(':')[0];
await saveBuildLog({
line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
buildId,
applicationId: application.id
});
await pushToRegistry(application, workdir, buildId, imageName, customTag);
await saveBuildLog({ line: 'Success', buildId, applicationId: application.id });
}
} catch (error) {
if (error.stdout) {
await saveBuildLog({ line: error.stdout, buildId, applicationId });
}
if (error.stderr) {
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}
} finally {
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({
where: { id: buildId },
data: { status: 'success' }
});
}
return;
}
const originalApplicationId = application.id;
const {
id: applicationId,
name,
destinationDocker,
destinationDockerId,
gitSource,
configHash,
fqdn,
projectId,
secrets,
phpModules,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
gitCommitHash,
dockerRegistry
} = application;
let {
branch,
repository,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
dockerComposeFileLocation,
dockerComposeConfiguration,
denoMainFile
} = application;
let imageId = applicationId;
let domain = getDomain(fqdn);
let location = null;
let tag = null;
let customTag = null;
let imageName = null;
let imageFoundLocally = false;
let imageFoundRemotely = false;
if (pullmergeRequestId) {
const previewApplications = await prisma.previewApplication.findMany({
where: { applicationId: originalApplicationId, pullmergeRequestId }
});
if (previewApplications.length > 0) {
previewApplicationId = previewApplications[0].id;
}
// For previews, we need to get the source branch and set the subdomain
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
repository = sourceRepository || repository;
}
const { workdir, repodir } = await createDirectories({ repository, buildId });
try {
if (queueBuild.status === 'running') {
await saveBuildLog({
line: 'Building halted, restarting...',
buildId,
applicationId: application.id
});
}
const currentHash = crypto
.createHash('sha256')
.update(
@@ -112,31 +389,29 @@ import * as buildpacks from '../lib/buildPacks';
)
.digest('hex');
const { debug } = settings;
// if (concurrency === 1) {
// await prisma.build.updateMany({
// where: {
// status: { in: ['queued', 'running'] },
// id: { not: buildId },
// applicationId,
// createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
// },
// data: { status: 'failed' }
// });
// }
let imageId = applicationId;
let domain = getDomain(fqdn);
if (!debug) {
await saveBuildLog({
line: `Debug logging is disabled. Enable it above if necessary!`,
buildId,
applicationId
});
}
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
if (storage.oldPath) {
return `${applicationId}${storage.path
.replace(/\//gi, '-')
.replace('-app', '')}:${storage.path}`;
}
if (storage.hostPath) {
return `${storage.hostPath}:${storage.path}`;
}
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || [];
// For previews, we need to get the source branch and set the subdomain
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
try {
dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration);
} catch (error) {}
let deployNeeded = true;
let destinationType;
@@ -144,8 +419,11 @@ import * as buildpacks from '../lib/buildPacks';
destinationType = 'docker';
}
if (destinationType === 'docker') {
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
await prisma.build.update({
where: { id: buildId },
data: { status: 'running' }
});
const configuration = await setDefaultConfiguration(application);
buildPack = configuration.buildPack;
@@ -153,9 +431,10 @@ import * as buildpacks from '../lib/buildPacks';
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
publishDirectory = configuration.publishDirectory || '';
baseDirectory = configuration.baseDirectory || '';
dockerFileLocation = configuration.dockerFileLocation;
dockerComposeFileLocation = configuration.dockerComposeFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
@@ -165,6 +444,9 @@ import * as buildpacks from '../lib/buildPacks';
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
customUser: gitSource.customUser,
gitCommitHash,
configuration,
repository,
branch,
buildId,
@@ -178,20 +460,35 @@ import * as buildpacks from '../lib/buildPacks';
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
if (application.dockerRegistryImageName) {
imageName = application.dockerRegistryImageName.split(':')[0];
customTag = application.dockerRegistryImageName.split(':')[1] || tag;
} else {
customTag = tag;
imageName = applicationId;
}
if (pullmergeRequestId) {
customTag = `${customTag}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) { }
} catch (err) {}
if (!pullmergeRequestId) {
if (configHash !== currentHash) {
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
await saveBuildLog({
line: 'Configuration changed',
buildId,
applicationId
});
}
} else {
deployNeeded = false;
@@ -200,29 +497,78 @@ import * as buildpacks from '../lib/buildPacks';
deployNeeded = true;
}
let imageFound = false;
try {
await executeDockerCmd({
await executeCommand({
dockerId: destinationDocker.id,
command: `docker image inspect ${applicationId}:${tag}`
})
imageFound = true;
});
imageFoundLocally = true;
} catch (error) {
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
location = await saveDockerRegistryCredentials({
url,
username,
password,
workdir
});
}
if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
try {
await executeCommand({
dockerId: destinationDocker.id,
command: `docker ${
location ? `--config ${location}` : ''
} pull ${imageName}:${customTag}`
});
imageFoundRemotely = true;
} catch (error) {
//
}
let imageFound = `${applicationId}:${tag}`;
if (imageFoundRemotely) {
imageFound = `${imageName}:${customTag}`;
}
await copyBaseConfigurationFiles(
buildPack,
workdir,
buildId,
applicationId,
baseImage
);
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
if (forceRebuild) deployNeeded = true;
if ((!imageFoundLocally && !imageFoundRemotely) || deployNeeded) {
if (buildpacks[buildPack])
await buildpacks[buildPack]({
dockerId: destinationDocker.id,
network: destinationDocker.network,
buildId,
applicationId,
domain,
name,
type,
volumes,
labels,
pullmergeRequestId,
buildPack,
repository,
@@ -244,129 +590,204 @@ import * as buildpacks from '../lib/buildPacks';
pythonModule,
pythonVariable,
dockerFileLocation,
dockerComposeConfiguration,
dockerComposeFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
deploymentType,
forceRebuild
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
await saveBuildLog({
line: `Build pack ${buildPack} not found`,
buildId,
applicationId
});
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
if (imageFoundRemotely || deployNeeded) {
await saveBuildLog({
line: `Container image ${imageFound} found in Docker Registry - reusing it`,
buildId,
applicationId
});
} else {
if (imageFoundLocally || deployNeeded) {
await saveBuildLog({
line: `Container image ${imageFound} found locally - reusing it`,
buildId,
applicationId
});
}
}
}
try {
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker stop -t 0 ${imageId}` })
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker rm ${imageId}` })
} catch (error) {
//
}
const envs = [
`PORT=${port}`
];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
if (buildPack === 'compose') {
const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
try {
const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}`
});
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({
dockerId: destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
...defaultComposeConfiguration(destinationDocker.network),
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
} catch (error) {
//
}
try {
await executeCommand({
debug,
buildId,
applicationId,
dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} -f ${fileYaml} up -d`
});
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
data: { status: 'success' }
});
await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
throw new Error(error);
} else {
try {
const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.service=${
pullmergeRequestId ? imageId : applicationId
}' --format {{.ID}}`
});
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({
dockerId: destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
} catch (error) {
//
}
let envs = [];
if (secrets.length > 0) {
envs = [
...envs,
...generateSecrets(secrets, pullmergeRequestId, false, port)
];
}
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
await saveDockerRegistryCredentials({ url, username, password, workdir });
}
try {
const composeVolumes = volumes
.filter((v) => {
if (
!v.startsWith('.') &&
!v.startsWith('..') &&
!v.startsWith('/') &&
!v.startsWith('~')
) {
return v;
}
})
.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: imageFound,
container_name: imageId,
volumes,
environment: envs,
labels,
depends_on: [],
expose: [port],
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
...defaultComposeConfiguration(destinationDocker.network)
}
},
networks: {
[destinationDocker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await executeCommand({
debug,
dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
});
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
if (!pullmergeRequestId)
await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
if (!pullmergeRequestId) await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
}
}
catch (error) {
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
} catch (error) {
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
@@ -378,17 +799,47 @@ import * as buildpacks from '../lib/buildPacks';
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
if (error instanceof Error) {
await saveBuildLog({
line: error.message,
buildId,
applicationId: application.id
});
}
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
if (application.dockerRegistryImageName && (!imageFoundRemotely || forceRebuild)) {
await saveBuildLog({
line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
buildId,
applicationId: application.id
});
await pushToRegistry(application, workdir, tag, imageName, customTag);
await saveBuildLog({ line: 'Success', buildId, applicationId: application.id });
}
} catch (error) {
if (error.stdout) {
await saveBuildLog({ line: error.stdout, buildId, applicationId });
}
if (error.stderr) {
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}
} finally {
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
}
});
}
await pAll.default(actions, { concurrency })
await pAll.default(actions, { concurrency });
}
} catch (error) {
console.log(error)
console.log(error);
}
})
});
while (true) {
await th()
await th();
}
} else process.exit(0);
})();

View File

@@ -1,229 +0,0 @@
import { parentPort } from 'node:worker_threads';
import axios from 'axios';
import { compareVersions } from 'compare-versions';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, listSettings, version, createRemoteEngineConfiguration } from '../lib/common';
async function autoUpdater() {
try {
const currentVersion = version;
const { data: versions } = await axios
.get(
`https://get.coollabs.io/versions.json`
, {
params: {
appId: process.env['COOLIFY_APP_ID'] || undefined,
version: currentVersion
}
})
const latestVersion = versions['coolify'].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
const activeCount = 0
if (activeCount === 0) {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
await asyncExecShell(`env | grep COOLIFY > .env`);
await asyncExecShell(
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
);
}
} else {
console.log('Updating (not really in dev mode).');
}
}
}
} catch (error) { }
}
async function checkProxies() {
try {
const { default: isReachable } = await import('is-port-reachable');
let portReachable;
const { arch, ipv4, ipv6 } = await listSettings();
// Coolify Proxy local
const engine = '/var/run/docker.sock';
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
});
if (localDocker) {
portReachable = await isReachable(80, { host: ipv4 || ipv6 })
if (!portReachable) {
await startTraefikProxy(localDocker.id);
}
}
// Coolify Proxy remote
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteEngine: true, remoteVerified: true }
});
if (remoteDocker.length > 0) {
for (const docker of remoteDocker) {
if (docker.isCoolifyProxyUsed) {
portReachable = await isReachable(80, { host: docker.remoteIpAddress })
if (!portReachable) {
await startTraefikProxy(docker.id);
}
}
try {
await createRemoteEngineConfiguration(docker.id)
} catch (error) { }
}
}
// TCP Proxies
const databasesWithPublicPort = await prisma.database.findMany({
where: { publicPort: { not: null } },
include: { settings: true, destinationDocker: true }
});
for (const database of databasesWithPublicPort) {
const { destinationDockerId, destinationDocker, publicPort, id } = database;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
const { privatePort } = generateDatabaseConfiguration(database, arch);
portReachable = await isReachable(publicPort, { host: destinationDocker.remoteIpAddress || ipv4 || ipv6 })
if (!portReachable) {
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
}
}
}
const wordpressWithFtp = await prisma.wordpress.findMany({
where: { ftpPublicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const ftp of wordpressWithFtp) {
const { service, ftpPublicPort } = ftp;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
portReachable = await isReachable(ftpPublicPort, { host: destinationDocker.remoteIpAddress || ipv4 || ipv6 })
if (!portReachable) {
await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
}
}
}
// HTTP Proxies
const minioInstances = await prisma.minio.findMany({
where: { publicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const minio of minioInstances) {
const { service, publicPort } = minio;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
portReachable = await isReachable(publicPort, { host: destinationDocker.remoteIpAddress || ipv4 || ipv6 })
if (!portReachable) {
await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
}
}
}
} catch (error) {
}
}
async function cleanupPrismaEngines() {
if (!isDev) {
try {
const { stdout } = await asyncExecShell(`ps -ef | grep /app/prisma-engines/query-engine | grep -v grep | wc -l | xargs`)
if (stdout.trim() != null && stdout.trim() != '' && Number(stdout.trim()) > 1) {
await asyncExecShell(`killall -q -e /app/prisma-engines/query-engine -o 1m`)
}
} catch (error) { }
}
}
async function cleanupStorage() {
const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set()
for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return
if (destination.engine) enginesDone.add(destination.engine)
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress)
let lowDiskSpace = false;
try {
let stdout = null
if (!isDev) {
const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` })
stdout = output.stdout;
} else {
const output = await asyncExecShell(
`df -kPT /`
);
stdout = output.stdout;
}
let lines = stdout.trim().split('\n');
let header = lines[0];
let regex =
/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
const boundaries = [];
let match;
while ((match = regex.exec(header))) {
boundaries.push(match[0].length);
}
boundaries[boundaries.length - 1] = -1;
const data = lines.slice(1).map((line) => {
const cl = boundaries.map((boundary) => {
const column = boundary > 0 ? line.slice(0, boundary) : line;
line = line.slice(boundary);
return column.trim();
});
return {
capacity: Number.parseInt(cl[5], 10) / 100
};
});
if (data.length > 0) {
const { capacity } = data[0];
if (capacity > 0.8) {
lowDiskSpace = true;
}
}
} catch (error) { }
await cleanupDockerStorage(destination.id, lowDiskSpace, false)
}
}
(async () => {
let status = {
cleanupStorage: false,
autoUpdater: false
}
if (parentPort) {
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
process.exit(1);
}
if (message === 'action:cleanupStorage') {
if (!status.autoUpdater) {
status.cleanupStorage = true
await cleanupStorage();
status.cleanupStorage = false
}
return;
}
if (message === 'action:cleanupPrismaEngines') {
await cleanupPrismaEngines();
return;
}
if (message === 'action:checkProxies') {
await checkProxies();
return;
}
if (message === 'action:autoUpdater') {
if (!status.cleanupStorage) {
status.autoUpdater = true
await autoUpdater();
status.autoUpdater = false
}
return;
}
}
});
} else process.exit(0);
})();

531
apps/api/src/lib.ts Normal file
View File

@@ -0,0 +1,531 @@
import cuid from "cuid";
import { decrypt, encrypt, fixType, generatePassword, generateToken, prisma } from "./lib/common";
import { getTemplates } from "./lib/services";
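// One-off migration: prefixes application persistent storage paths with /app (except for docker build pack apps) and marks them as oldPath, then flags the migration as finished.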
export async function migrateApplicationPersistentStorage() {
const settings = await prisma.setting.findFirst()
if (settings) {
const { id: settingsId, applicationStoragePathMigrationFinished } = settings
try {
if (!applicationStoragePathMigrationFinished) {
const applications = await prisma.application.findMany({ include: { persistentStorage: true } });
for (const application of applications) {
if (application.persistentStorage && application.persistentStorage.length > 0 && application?.buildPack !== 'docker') {
for (const storage of application.persistentStorage) {
let { id, path } = storage
if (!path.startsWith('/app')) {
path = `/app${path}`
await prisma.applicationPersistentStorage.update({ where: { id }, data: { path, oldPath: true } })
}
}
}
}
}
} catch (error) {
console.log(error)
} finally {
await prisma.setting.update({ where: { id: settingsId }, data: { applicationStoragePathMigrationFinished: true } })
}
}
}
export async function migrateServicesToNewTemplate() {
// This function migrates old hardcoded services to the new template-based services
try {
let templates = await getTemplates()
const services: any = await prisma.service.findMany({
include: {
destinationDocker: true,
persistentStorage: true,
serviceSecret: true,
serviceSetting: true,
minio: true,
plausibleAnalytics: true,
vscodeserver: true,
wordpress: true,
ghost: true,
meiliSearch: true,
umami: true,
hasura: true,
fider: true,
moodle: true,
appwrite: true,
glitchTip: true,
searxng: true,
weblate: true,
taiga: true,
}
})
for (const service of services) {
try {
const { id } = service
if (!service.type) {
continue;
}
let template = templates.find(t => fixType(t.type) === fixType(service.type));
if (template) {
template = JSON.parse(JSON.stringify(template).replaceAll('$$id', service.id))
if (service.type === 'plausibleanalytics' && service.plausibleAnalytics) await plausibleAnalytics(service, template)
if (service.type === 'fider' && service.fider) await fider(service, template)
if (service.type === 'minio' && service.minio) await minio(service, template)
if (service.type === 'vscodeserver' && service.vscodeserver) await vscodeserver(service, template)
if (service.type === 'wordpress' && service.wordpress) await wordpress(service, template)
if (service.type === 'ghost' && service.ghost) await ghost(service, template)
if (service.type === 'meilisearch' && service.meiliSearch) await meilisearch(service, template)
if (service.type === 'umami' && service.umami) await umami(service, template)
if (service.type === 'hasura' && service.hasura) await hasura(service, template)
if (service.type === 'glitchTip' && service.glitchTip) await glitchtip(service, template)
if (service.type === 'searxng' && service.searxng) await searxng(service, template)
if (service.type === 'weblate' && service.weblate) await weblate(service, template)
if (service.type === 'appwrite' && service.appwrite) await appwrite(service, template)
try {
await createVolumes(service, template);
} catch (error) {
console.log(error)
}
if (template.variables) {
if (template.variables.length > 0) {
for (const variable of template.variables) {
let { defaultValue } = variable;
defaultValue = defaultValue.toString();
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec(defaultValue)?.[1]) || undefined
if (defaultValue.startsWith('$$generate_password')) {
variable.value = generatePassword({ length });
} else if (defaultValue.startsWith('$$generate_hex')) {
variable.value = generatePassword({ length, isHex: true });
} else if (defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else if (defaultValue.startsWith('$$generate_token')) {
variable.value = generateToken()
} else {
variable.value = defaultValue || '';
}
}
}
for (const variable of template.variables) {
if (variable.id.startsWith('$$secret_')) {
const found = await prisma.serviceSecret.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) {
await prisma.serviceSecret.create({
data: { name: variable.name, value: encrypt(variable.value) || '', service: { connect: { id } } }
})
}
}
if (variable.id.startsWith('$$config_')) {
const found = await prisma.serviceSetting.findFirst({ where: { name: variable.name, serviceId: id } })
if (!found) {
await prisma.serviceSetting.create({
data: { name: variable.name, value: variable.value.toString(), variableName: variable.id, service: { connect: { id } } }
})
}
}
}
}
for (const s of Object.keys(template.services)) {
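// Register the template's named volumes as predefined persistent storage entries.
// Host bind mounts (names starting with '/') are skipped, and Plausible is skipped here
// because createVolumes above already registered its volumes under the old record id.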
if (service.type === 'plausibleanalytics') {
continue;
}
if (template.services[s].volumes) {
for (const volume of template.services[s].volumes) {
const [volumeName, path] = volume.split(':')
if (!volumeName.startsWith('/')) {
const found = await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: id } })
if (!found) {
await prisma.servicePersistentStorage.create({
data: { volumeName, path, containerId: s, predefined: true, service: { connect: { id } } }
});
}
}
}
}
}
await prisma.service.update({ where: { id }, data: { templateVersion: template.templateVersion } })
}
} catch (error) {
console.log(error)
}
}
} catch (error) {
console.log(error)
}
}
async function appwrite(service: any, template: any) {
const { opensslKeyV1, executorSecret, mariadbHost, mariadbPort, mariadbUser, mariadbPassword, mariadbRootUserPassword, mariadbDatabase } = service.appwrite
const secrets = [
`_APP_EXECUTOR_SECRET@@@${executorSecret}`,
`_APP_OPENSSL_KEY_V1@@@${opensslKeyV1}`,
`_APP_DB_PASS@@@${mariadbPassword}`,
`_APP_DB_ROOT_PASS@@@${mariadbRootUserPassword}`,
]
const settings = [
`_APP_DB_HOST@@@${mariadbHost}`,
`_APP_DB_PORT@@@${mariadbPort}`,
`_APP_DB_USER@@@${mariadbUser}`,
`_APP_DB_SCHEMA@@@${mariadbDatabase}`,
]
await migrateSecrets(secrets, service);
await migrateSettings(settings, service, template);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { appwrite: { disconnect: true } } })
}
async function weblate(service: any, template: any) {
const { adminPassword, postgresqlUser, postgresqlPassword, postgresqlDatabase } = service.weblate
const secrets = [
`WEBLATE_ADMIN_PASSWORD@@@${adminPassword}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
]
const settings = [
`WEBLATE_SITE_DOMAIN@@@$$generate_domain`,
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DATABASE@@@${postgresqlDatabase}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
`POSTGRES_HOST@@@$$id-postgres`,
`POSTGRES_PORT@@@5432`,
`REDIS_HOST@@@$$id-redis`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { weblate: { disconnect: true } } })
}
async function searxng(service: any, template: any) {
const { secretKey, redisPassword } = service.searxng
const secrets = [
`SECRET_KEY@@@${secretKey}`,
`REDIS_PASSWORD@@@${redisPassword}`,
]
const settings = [
`SEARXNG_BASE_URL@@@$$generate_fqdn`
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { searxng: { disconnect: true } } })
}
async function glitchtip(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, secretKeyBase, defaultEmail, defaultUsername, defaultPassword, defaultEmailFrom, emailSmtpHost, emailSmtpPort, emailSmtpUser, emailSmtpPassword, emailSmtpUseTls, emailSmtpUseSsl, emailBackend, mailgunApiKey, sendgridApiKey, enableOpenUserRegistration } = service.glitchTip
const { id } = service
const secrets = [
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
`SECRET_KEY@@@${secretKeyBase}`,
`MAILGUN_API_KEY@@@${mailgunApiKey}`,
`SENDGRID_API_KEY@@@${sendgridApiKey}`,
`DJANGO_SUPERUSER_PASSWORD@@@${defaultPassword}`,
emailSmtpUser && emailSmtpPassword && emailSmtpHost && emailSmtpPort && `EMAIL_URL@@@${encrypt(`smtp://${emailSmtpUser}:${decrypt(emailSmtpPassword)}@${emailSmtpHost}:${emailSmtpPort}`)}` || '',
`DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
`REDIS_URL@@@${encrypt(`redis://${id}-redis:6379`)}`
]
const settings = [
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
`DEFAULT_FROM_EMAIL@@@${defaultEmailFrom}`,
`EMAIL_USE_TLS@@@${emailSmtpUseTls}`,
`EMAIL_USE_SSL@@@${emailSmtpUseSsl}`,
`EMAIL_BACKEND@@@${emailBackend}`,
`ENABLE_OPEN_USER_REGISTRATION@@@${enableOpenUserRegistration}`,
`DJANGO_SUPERUSER_EMAIL@@@${defaultEmail}`,
`DJANGO_SUPERUSER_USERNAME@@@${defaultUsername}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
await prisma.service.update({ where: { id: service.id }, data: { type: 'glitchtip' } })
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { glitchTip: { disconnect: true } } })
}
async function hasura(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, graphQLAdminPassword } = service.hasura
const { id } = service
const secrets = [
`HASURA_GRAPHQL_ADMIN_SECRET@@@${graphQLAdminPassword}`,
`HASURA_GRAPHQL_METADATA_DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
]
const settings = [
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { hasura: { disconnect: true } } })
}
async function umami(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, umamiAdminPassword, hashSalt } = service.umami
const { id } = service
const secrets = [
`HASH_SALT@@@${hashSalt}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
`ADMIN_PASSWORD@@@${umamiAdminPassword}`,
`DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
]
const settings = [
`DATABASE_TYPE@@@postgresql`,
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
await prisma.service.update({ where: { id: service.id }, data: { type: "umami-postgresql" } })
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { umami: { disconnect: true } } })
}
async function meilisearch(service: any, template: any) {
const { masterKey } = service.meiliSearch
const secrets = [
`MEILI_MASTER_KEY@@@${masterKey}`,
]
// await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { meiliSearch: { disconnect: true } } })
}
async function ghost(service: any, template: any) {
const { defaultEmail, defaultPassword, mariadbUser, mariadbPassword, mariadbRootUser, mariadbRootUserPassword, mariadbDatabase } = service.ghost
const { fqdn } = service
const isHttps = fqdn.startsWith('https://');
const secrets = [
`GHOST_PASSWORD@@@${defaultPassword}`,
`MARIADB_PASSWORD@@@${mariadbPassword}`,
`MARIADB_ROOT_PASSWORD@@@${mariadbRootUserPassword}`,
`GHOST_DATABASE_PASSWORD@@@${mariadbPassword}`,
]
const settings = [
`GHOST_EMAIL@@@${defaultEmail}`,
`GHOST_DATABASE_HOST@@@${service.id}-mariadb`,
`GHOST_DATABASE_USER@@@${mariadbUser}`,
`GHOST_DATABASE_NAME@@@${mariadbDatabase}`,
`GHOST_DATABASE_PORT_NUMBER@@@3306`,
`MARIADB_USER@@@${mariadbUser}`,
`MARIADB_DATABASE@@@${mariadbDatabase}`,
`MARIADB_ROOT_USER@@@${mariadbRootUser}`,
`GHOST_HOST@@@$$generate_domain`,
`url@@@$$generate_fqdn`,
`GHOST_ENABLE_HTTPS@@@${isHttps ? 'yes' : 'no'}`
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
await prisma.service.update({ where: { id: service.id }, data: { type: "ghost-mariadb" } })
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { ghost: { disconnect: true } } })
}
async function wordpress(service: any, template: any) {
const { extraConfig, tablePrefix, ownMysql, mysqlHost, mysqlPort, mysqlUser, mysqlPassword, mysqlRootUser, mysqlRootUserPassword, mysqlDatabase, ftpEnabled, ftpUser, ftpPassword, ftpPublicPort, ftpHostKey, ftpHostKeyPrivate } = service.wordpress
let settings = []
let secrets = []
if (ownMysql) {
secrets = [
`WORDPRESS_DB_PASSWORD@@@${mysqlPassword}`,
ftpPassword && `COOLIFY_FTP_PASSWORD@@@${ftpPassword}`,
ftpHostKeyPrivate && `COOLIFY_FTP_HOST_KEY_PRIVATE@@@${ftpHostKeyPrivate}`,
ftpHostKey && `COOLIFY_FTP_HOST_KEY@@@${ftpHostKey}`,
]
settings = [
`WORDPRESS_CONFIG_EXTRA@@@${extraConfig}`,
`WORDPRESS_DB_HOST@@@${mysqlHost}`,
`WORDPRESS_DB_PORT@@@${mysqlPort}`,
`WORDPRESS_DB_USER@@@${mysqlUser}`,
`WORDPRESS_DB_NAME@@@${mysqlDatabase}`,
]
} else {
secrets = [
`MYSQL_ROOT_PASSWORD@@@${mysqlRootUserPassword}`,
`MYSQL_PASSWORD@@@${mysqlPassword}`,
ftpPassword && `COOLIFY_FTP_PASSWORD@@@${ftpPassword}`,
ftpHostKeyPrivate && `COOLIFY_FTP_HOST_KEY_PRIVATE@@@${ftpHostKeyPrivate}`,
ftpHostKey && `COOLIFY_FTP_HOST_KEY@@@${ftpHostKey}`,
]
settings = [
`MYSQL_ROOT_USER@@@${mysqlRootUser}`,
`MYSQL_USER@@@${mysqlUser}`,
`MYSQL_DATABASE@@@${mysqlDatabase}`,
`MYSQL_HOST@@@${service.id}-mysql`,
`MYSQL_PORT@@@${mysqlPort}`,
`WORDPRESS_CONFIG_EXTRA@@@${extraConfig}`,
`WORDPRESS_TABLE_PREFIX@@@${tablePrefix}`,
`WORDPRESS_DB_HOST@@@${service.id}-mysql`,
`COOLIFY_OWN_DB@@@${ownMysql}`,
`COOLIFY_FTP_ENABLED@@@${ftpEnabled}`,
`COOLIFY_FTP_USER@@@${ftpUser}`,
`COOLIFY_FTP_PUBLIC_PORT@@@${ftpPublicPort}`,
]
}
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
if (ownMysql) {
await prisma.service.update({ where: { id: service.id }, data: { type: "wordpress-only" } })
}
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { wordpress: { disconnect: true } } })
}
async function vscodeserver(service: any, template: any) {
const { password } = service.vscodeserver
const secrets = [
`PASSWORD@@@${password}`,
]
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { vscodeserver: { disconnect: true } } })
}
async function minio(service: any, template: any) {
const { rootUser, rootUserPassword, apiFqdn } = service.minio
const secrets = [
`MINIO_ROOT_PASSWORD@@@${rootUserPassword}`,
]
const settings = [
`MINIO_ROOT_USER@@@${rootUser}`,
`MINIO_SERVER_URL@@@${apiFqdn}`,
`MINIO_BROWSER_REDIRECT_URL@@@$$generate_fqdn`,
`MINIO_DOMAIN@@@$$generate_domain`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { minio: { disconnect: true } } })
}
async function fider(service: any, template: any) {
const { postgresqlUser, postgresqlPassword, postgresqlDatabase, jwtSecret, emailNoreply, emailMailgunApiKey, emailMailgunDomain, emailMailgunRegion, emailSmtpHost, emailSmtpPort, emailSmtpUser, emailSmtpPassword, emailSmtpEnableStartTls } = service.fider
const { id } = service
const secrets = [
`JWT_SECRET@@@${jwtSecret}`,
emailMailgunApiKey && `EMAIL_MAILGUN_API@@@${emailMailgunApiKey}`,
emailSmtpPassword && `EMAIL_SMTP_PASSWORD@@@${emailSmtpPassword}`,
`POSTGRES_PASSWORD@@@${postgresqlPassword}`,
`DATABASE_URL@@@${encrypt(`postgresql://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}?sslmode=disable`)}`
]
const settings = [
`BASE_URL@@@$$generate_fqdn`,
`EMAIL_NOREPLY@@@${emailNoreply || 'noreply@example.com'}`,
`EMAIL_MAILGUN_DOMAIN@@@${emailMailgunDomain || ''}`,
`EMAIL_MAILGUN_REGION@@@${emailMailgunRegion || ''}`,
`EMAIL_SMTP_HOST@@@${emailSmtpHost || ''}`,
`EMAIL_SMTP_PORT@@@${emailSmtpPort || 587}`,
`EMAIL_SMTP_USER@@@${emailSmtpUser || ''}`,
`EMAIL_SMTP_PASSWORD@@@${emailSmtpPassword || ''}`,
`EMAIL_SMTP_ENABLE_STARTTLS@@@${emailSmtpEnableStartTls || 'false'}`,
`POSTGRES_USER@@@${postgresqlUser}`,
`POSTGRES_DB@@@${postgresqlDatabase}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { fider: { disconnect: true } } })
}
async function plausibleAnalytics(service: any, template: any) {
const { email, username, password, postgresqlUser, postgresqlPassword, postgresqlDatabase, secretKeyBase, scriptName } = service.plausibleAnalytics;
const { id } = service
const settings = [
`BASE_URL@@@$$generate_fqdn`,
`ADMIN_USER_EMAIL@@@${email}`,
`ADMIN_USER_NAME@@@${username}`,
`DISABLE_AUTH@@@false`,
`DISABLE_REGISTRATION@@@true`,
`POSTGRESQL_USERNAME@@@${postgresqlUser}`,
`POSTGRESQL_DATABASE@@@${postgresqlDatabase}`,
`SCRIPT_NAME@@@${scriptName}`,
]
const secrets = [
`ADMIN_USER_PWD@@@${password}`,
`SECRET_KEY_BASE@@@${secretKeyBase}`,
`POSTGRESQL_PASSWORD@@@${postgresqlPassword}`,
`DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
]
await migrateSettings(settings, service, template);
await migrateSecrets(secrets, service);
// Disconnect old service data
// await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } })
}
async function migrateSettings(settings: any[], service: any, template: any) {
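// Each entry is packed as "NAME@@@value" by the per-service migrators above;
// null/empty values are skipped.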
for (const setting of settings) {
try {
if (!setting) continue;
let [name, value] = setting.split('@@@')
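// Keep the original name around: MINIO_SERVER_URL is remapped only for the
// template-variable lookup, while the stored setting keeps its original key.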
let minio = name
if (name === 'MINIO_SERVER_URL') {
name = 'coolify_fqdn_minio_console'
}
if (!value || value === 'null') {
continue;
}
let variableName = template.variables.find((v: any) => v.name === name)?.id
if (!variableName) {
variableName = `$$config_${name.toLowerCase()}`
}
// console.log('Migrating setting', name, value, 'for service', service.id, ', service name:', service.name, 'variableName: ', variableName)
await prisma.serviceSetting.findFirst({ where: { name: minio, serviceId: service.id } }) || await prisma.serviceSetting.create({ data: { name: minio, value, variableName, service: { connect: { id: service.id } } } })
} catch (error) {
console.log(error)
}
}
}
async function migrateSecrets(secrets: any[], service: any) {
for (const secret of secrets) {
try {
if (!secret) continue;
let [name, value] = secret.split('@@@')
if (!value || value === 'null') {
continue
}
// console.log('Migrating secret', name, value, 'for service', service.id, ', service name:', service.name)
await prisma.serviceSecret.findFirst({ where: { name, serviceId: service.id } }) || await prisma.serviceSecret.create({ data: { name, value, service: { connect: { id: service.id } } } })
} catch (error) {
console.log(error)
}
}
}
async function createVolumes(service: any, template: any) {
const volumes = [];
for (const s of Object.keys(template.services)) {
if (template.services[s].volumes && template.services[s].volumes.length > 0) {
for (const volume of template.services[s].volumes) {
let volumeName = volume.split(':')[0]
const volumePath = volume.split(':')[1]
let volumeService = s
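// Older Plausible volumes used the plausibleAnalytics record id as prefix; swap it back in
// so the migrated entry keeps matching the previously provisioned volume.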
if (service.type === 'plausibleanalytics' && service.plausibleAnalytics?.id) {
let volumeId = volumeName.split('-')[0]
volumeName = volumeName.replace(volumeId, service.plausibleAnalytics.id)
}
volumes.push(`${volumeName}@@@${volumePath}@@@${volumeService}`)
}
}
}
for (const volume of volumes) {
const [volumeName, path, containerId] = volume.split('@@@')
// console.log('Creating volume', volumeName, path, containerId, 'for service', service.id, ', service name:', service.name)
await prisma.servicePersistentStorage.findFirst({ where: { volumeName, serviceId: service.id } }) || await prisma.servicePersistentStorage.create({ data: { volumeName, path, containerId, predefined: true, service: { connect: { id: service.id } } } })
}
}

View File

@@ -1,6 +1,18 @@
import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import {
base64Encode,
decrypt,
encrypt,
executeCommand,
generateSecrets,
generateTimestamp,
getDomain,
isARM,
isDev,
prisma,
version
} from '../common';
import { promises as fs } from 'fs';
import { day } from "../dayjs";
import { day } from '../dayjs';
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
const nodeBased = [
@@ -17,7 +29,10 @@ const nodeBased = [
'nextjs'
];
export function setDefaultBaseImage(buildPack: string | null, deploymentType: string | null = null) {
export function setDefaultBaseImage(
buildPack: string | null,
deploymentType: string | null = null
) {
const nodeVersions = [
{
value: 'node:lts',
@@ -52,6 +67,14 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
{
value: 'webdevops/apache:alpine',
label: 'webdevops/apache:alpine'
},
{
value: 'nginx:alpine',
label: 'nginx:alpine'
},
{
value: 'httpd:alpine',
label: 'httpd:alpine (Apache)'
}
];
const rustVersions = [
@@ -214,8 +237,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
label: 'webdevops/php-apache:7.1-alpine'
},
{
value: 'webdevops/php-nginx:7.1-alpine',
label: 'webdevops/php-nginx:7.1-alpine'
value: 'php:8.1-fpm',
label: 'php:8.1-fpm'
},
{
value: 'php:8.0-fpm',
label: 'php:8.0-fpm'
},
{
value: 'php:8.1-fpm-alpine',
label: 'php:8.1-fpm-alpine'
},
{
value: 'php:8.0-fpm-alpine',
label: 'php:8.0-fpm-alpine'
}
];
const pythonVersions = [
@@ -296,8 +331,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
{
value: 'heroku/builder-classic:22',
label: 'heroku/builder-classic:22'
},
]
}
];
let payload: any = {
baseImage: null,
baseBuildImage: null,
@@ -306,8 +341,10 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
};
if (nodeBased.includes(buildPack)) {
if (deploymentType === 'static') {
payload.baseImage = 'webdevops/nginx:alpine';
payload.baseImages = staticVersions;
payload.baseImage = isARM() ? 'nginx:alpine' : 'webdevops/nginx:alpine';
payload.baseImages = isARM()
? staticVersions.filter((version) => !version.value.includes('webdevops'))
: staticVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
} else {
@@ -318,8 +355,10 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
}
}
if (staticApps.includes(buildPack)) {
payload.baseImage = 'webdevops/nginx:alpine';
payload.baseImages = staticVersions;
payload.baseImage = isARM() ? 'nginx:alpine' : 'webdevops/nginx:alpine';
payload.baseImages = isARM()
? staticVersions.filter((version) => !version.value.includes('webdevops'))
: staticVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
}
@@ -337,18 +376,26 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
payload.baseImage = 'denoland/deno:latest';
}
if (buildPack === 'php') {
payload.baseImage = 'webdevops/php-apache:8.2-alpine';
payload.baseImages = phpVersions;
payload.baseImage = isARM()
? 'php:8.1-fpm-alpine'
: 'webdevops/php-apache:8.2-alpine';
payload.baseImages = isARM()
? phpVersions.filter((version) => !version.value.includes('webdevops'))
: phpVersions;
}
if (buildPack === 'laravel') {
payload.baseImage = 'webdevops/php-apache:8.2-alpine';
payload.baseImage = isARM()
? 'php:8.1-fpm-alpine'
: 'webdevops/php-apache:8.2-alpine';
payload.baseImages = isARM()
? phpVersions.filter((version) => !version.value.includes('webdevops'))
: phpVersions;
payload.baseBuildImage = 'node:18';
payload.baseBuildImages = nodeVersions;
}
if (buildPack === 'heroku') {
payload.baseImage = 'heroku/buildpacks:20';
payload.baseImages = herokuVersions;
}
return payload;
}
@@ -363,6 +410,7 @@ export const setDefaultConfiguration = async (data: any) => {
publishDirectory,
baseDirectory,
dockerFileLocation,
dockerComposeFileLocation,
denoMainFile
} = data;
//@ts-ignore
@@ -381,10 +429,16 @@ export const setDefaultConfiguration = async (data: any) => {
startCommand = template?.startCommand || 'yarn start';
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
buildCommand = template?.buildCommand || null;
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (!publishDirectory) {
publishDirectory = template?.publishDirectory || null;
} else {
if (!publishDirectory.startsWith('/')) publishDirectory = `/${publishDirectory}`;
if (publishDirectory.endsWith('/')) publishDirectory = publishDirectory.slice(0, -1);
}
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
if (baseDirectory.endsWith('/') && baseDirectory !== '/')
baseDirectory = baseDirectory.slice(0, -1);
}
if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
@@ -392,6 +446,14 @@ export const setDefaultConfiguration = async (data: any) => {
} else {
dockerFileLocation = '/Dockerfile';
}
if (dockerComposeFileLocation) {
if (!dockerComposeFileLocation.startsWith('/'))
dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
if (dockerComposeFileLocation.endsWith('/'))
dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
} else {
dockerComposeFileLocation = '/Dockerfile';
}
if (!denoMainFile) {
denoMainFile = 'main.ts';
}
@@ -405,6 +467,7 @@ export const setDefaultConfiguration = async (data: any) => {
publishDirectory,
baseDirectory,
dockerFileLocation,
dockerComposeFileLocation,
denoMainFile
};
};
@@ -451,7 +514,6 @@ export const scanningTemplates = {
}
};
export const saveBuildLog = async ({
line,
buildId,
@@ -461,16 +523,28 @@ export const saveBuildLog = async ({
buildId: string;
applicationId: string;
}): Promise<any> => {
const { default: got } = await import('got')
if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
const { default: got } = await import('got');
if (typeof line === 'object' && line) {
if (line.shortMessage) {
line = line.shortMessage + '\n' + line.stderr;
} else {
line = JSON.stringify(line);
}
}
if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `[${generateTimestamp()}] ${line}`;
const fluentBitUrl = isDev ? 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
const fluentBitUrl = isDev
? process.env.COOLIFY_CONTAINER_DEV === 'true'
? 'http://coolify-fluentbit:24224'
: 'http://localhost:24224'
: 'http://coolify-fluentbit:24224';
if (isDev) {
if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
console.debug(`[${applicationId}] ${addTimestamp}`);
}
try {
@@ -478,15 +552,17 @@ export const saveBuildLog = async ({
json: {
line: encrypt(line)
}
})
} catch(error) {
});
} catch (error) {
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
line: addTimestamp,
buildId,
time: Number(day().valueOf()),
applicationId
}
});
}
};
export async function copyBaseConfigurationFiles(
@@ -558,6 +634,7 @@ export async function copyBaseConfigurationFiles(
`
);
}
// TODO: Add more configuration files for other buildpacks, like apache2, etc.
} catch (error) {
throw new Error(error);
}
@@ -571,6 +648,29 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
);
}
export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
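// Writes a .docker/config.json into the build workdir so the docker CLI can be pointed at it
// via --config when the application uses a private registry.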
if (!username || !password) {
return null;
}
let decryptedPassword = decrypt(password);
const location = `${workdir}/.docker`;
try {
await fs.mkdir(`${workdir}/.docker`);
} catch (error) {
// console.log(error);
}
const payload = JSON.stringify({
auths: {
[url]: {
auth: Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
}
}
});
await fs.writeFile(`${location}/config.json`, payload);
return location;
}
export async function buildImage({
applicationId,
tag,
@@ -579,58 +679,51 @@ export async function buildImage({
dockerId,
isCache = false,
debug = false,
dockerFileLocation = '/Dockerfile'
dockerFileLocation = '/Dockerfile',
commit,
forceRebuild = false
}) {
if (isCache) {
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
} else {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
}
if (!debug && isCache) {
await saveBuildLog({
line: `Debug turned off. To see more details, allow it in the configuration.`,
buildId,
applicationId
});
}
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} ${workdir}` })
const { status } = await prisma.build.findUnique({ where: { id: buildId } })
if (status === 'canceled') {
throw new Error('Deployment canceled.')
}
if (isCache) {
await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId });
} else {
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
}
}
const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`;
const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`;
let location = null;
export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
await new Promise((resolve, reject) => {
docker.engine.modem.followProgress(stream, onFinished, onProgress);
function onFinished(err, res) {
if (err) reject(err);
resolve(res);
}
async function onProgress(event) {
if (event.error) {
reject(event.error);
} else if (event.stream) {
if (event.stream !== '\n') {
if (debug)
await saveBuildLog({
line: `${event.stream.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
const { dockerRegistry } = await prisma.application.findUnique({
where: { id: applicationId },
select: { dockerRegistry: true }
});
}
if (dockerRegistry) {
const { url, username, password } = dockerRegistry;
location = await saveDockerRegistryCredentials({ url, username, password, workdir });
}
await executeCommand({
stream: true,
debug,
buildId,
applicationId,
dockerId,
command: `docker ${location ? `--config ${location}` : ''} build ${forceRebuild ? '--no-cache' : ''
} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
});
const { status } = await prisma.build.findUnique({ where: { id: buildId } });
if (status === 'canceled') {
throw new Error('Canceled.');
}
}
export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.applicationId=${applicationId}`,
`coolify.type=standalone-application`
];
}
export function makeLabelForStandaloneApplication({
applicationId,
fqdn,
@@ -657,7 +750,9 @@ export function makeLabelForStandaloneApplication({
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.applicationId=${applicationId}`,
`coolify.type=standalone-application`,
`coolify.name=${name}`,
`coolify.configuration=${base64Encode(
JSON.stringify({
applicationId,
@@ -697,21 +792,8 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (isPnpm) {
@@ -722,48 +804,32 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
Dockerfile.push(`RUN ${installCommand}`);
}
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
}
export async function buildCacheImageForLaravel(data, imageForBuild) {
const { workdir, buildId, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
Dockerfile.push(`COPY *.json *.mix.js /app/`);
Dockerfile.push(`COPY resources /app/resources`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`RUN yarn install && yarn production`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
}
export async function buildCacheImageWithCargo(data, imageForBuild) {
const {
applicationId,
workdir,
buildId,
} = data;
const { applicationId, workdir, buildId } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
@@ -778,6 +844,7 @@ export async function buildCacheImageWithCargo(data, imageForBuild) {
Dockerfile.push('RUN cargo install cargo-chef');
Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
}
}

View File

@@ -0,0 +1,197 @@
import { promises as fs } from 'fs';
import { defaultComposeConfiguration, executeCommand, generateSecrets } from '../common';
import { saveBuildLog } from './common';
import yaml from 'js-yaml';
export default async function (data) {
let {
applicationId,
debug,
buildId,
dockerId,
network,
volumes,
labels,
workdir,
baseDirectory,
secrets,
pullmergeRequestId,
dockerComposeConfiguration,
dockerComposeFileLocation
} = data;
const baseDir = `${workdir}${baseDirectory}`;
const envFile = `${baseDir}/.env`;
const fileYaml = `${baseDir}${dockerComposeFileLocation}`;
const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
const dockerComposeYaml = yaml.load(dockerComposeRaw);
if (!dockerComposeYaml.services) {
throw 'No Services found in docker-compose file.';
}
let envs = [];
let buildEnvs = [];
if (secrets.length > 0) {
envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
buildEnvs = [...buildEnvs, ...generateSecrets(secrets, pullmergeRequestId, true, null, true)];
}
await fs.writeFile(envFile, envs.join('\n'));
const composeVolumes = [];
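// Collect named volumes (anything that is not a relative, absolute or home-relative path)
// so they can be declared in the top-level "volumes" section of the generated compose file.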
if (volumes.length > 0) {
for (const volume of volumes) {
let [v, path] = volume.split(':');
if (!v.startsWith('.') && !v.startsWith('..') && !v.startsWith('/') && !v.startsWith('~')) {
composeVolumes[v] = {
name: v
};
}
}
}
let networks = {};
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
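// Normalize each service: fixed container name, injected env file, build args merged from
// secrets, caller-supplied labels, rewritten volumes, and attachment to the deployment network.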
value['container_name'] = `${applicationId}-${key}`;
if (value['env_file']) {
delete value['env_file'];
}
value['env_file'] = [envFile];
// let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
// let finalEnvs = [...envs];
// if (Object.keys(environment).length > 0) {
// for (const arg of Object.keys(environment)) {
// const [key, _] = arg.split('=');
// if (finalEnvs.filter((env) => env.startsWith(key)).length === 0) {
// finalEnvs.push(arg);
// }
// }
// }
// value['environment'] = [...finalEnvs];
let build = typeof value['build'] === 'undefined' ? [] : value['build'];
if (typeof build === 'string') {
build = { context: build };
}
const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
let finalBuildArgs = [...buildEnvs];
if (Object.keys(buildArgs).length > 0) {
for (const arg of Object.keys(buildArgs)) {
const [key, _] = arg.split('=');
if (finalBuildArgs.filter((env) => env.startsWith(key)).length === 0) {
finalBuildArgs.push(arg);
}
}
}
if (build.length > 0 || buildArgs.length > 0) {
value['build'] = {
...build,
args: finalBuildArgs
};
}
value['labels'] = labels;
// TODO: If we support separate volumes for each service, we need to add them here
if (value['volumes']?.length > 0) {
value['volumes'] = value['volumes'].map((volume) => {
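// Relative sources (./, ../, $PWD) are remapped under the home directory, absolute and ~ paths
// stay as bind mounts, and everything else becomes an application-scoped named volume.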
if (typeof volume === 'string') {
let [v, path, permission] = volume.split(':');
if (
v.startsWith('.') ||
v.startsWith('..') ||
v.startsWith('/') ||
v.startsWith('~') ||
v.startsWith('$PWD')
) {
v = v
.replace(/^\./, `~`)
.replace(/^\.\./, '~')
.replace(/^\$PWD/, '~');
} else {
if (!path) {
path = v;
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
} else {
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
}
composeVolumes[v] = {
name: v
};
}
return `${v}:${path}${permission ? ':' + permission : ''}`;
}
if (typeof volume === 'object') {
let { source, target, mode } = volume;
if (
source.startsWith('.') ||
source.startsWith('..') ||
source.startsWith('/') ||
source.startsWith('~') ||
source.startsWith('$PWD')
) {
source = source
.replace(/^\./, `~`)
.replace(/^\.\./, '~')
.replace(/^\$PWD/, '~');
} else {
if (!target) {
target = source;
source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
} else {
source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
}
}
return `${source}:${target}${mode ? ':' + mode : ''}`;
}
});
}
if (volumes.length > 0) {
for (const volume of volumes) {
value['volumes'].push(volume);
}
}
if (dockerComposeConfiguration[key]?.port) {
value['expose'] = [dockerComposeConfiguration[key].port];
}
value['networks'] = [network];
if (value['build']?.network) {
delete value['build']['network'];
}
// if (value['networks']?.length > 0) {
// value['networks'].forEach((network) => {
// networks[network] = {
// name: network
// };
// });
// value['networks'] = [...(value['networks'] || ''), network];
// } else {
// value['networks'] = [network];
// }
dockerComposeYaml.services[key] = {
...dockerComposeYaml.services[key],
restart: defaultComposeConfiguration(network).restart,
deploy: defaultComposeConfiguration(network).deploy
};
}
if (Object.keys(composeVolumes).length > 0) {
dockerComposeYaml['volumes'] = { ...composeVolumes };
}
dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });
await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
await executeCommand({
debug,
buildId,
applicationId,
dockerId,
command: `docker compose --project-directory ${workdir} -f ${fileYaml} pull`
});
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
await executeCommand({
debug,
buildId,
applicationId,
dockerId,
command: `docker compose --project-directory ${workdir} -f ${fileYaml} build --progress plain`
});
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
}

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
@@ -24,21 +25,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (depsFound) {
@@ -48,8 +36,9 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
Dockerfile.push(`ENV NO_COLOR true`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,53 +1,27 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';
export default async function (data) {
let {
applicationId,
debug,
tag,
workdir,
buildId,
baseDirectory,
secrets,
pullmergeRequestId,
dockerFileLocation
} = data
try {
const file = `${workdir}${dockerFileLocation}`;
let dockerFileOut = `${workdir}`;
if (baseDirectory) {
dockerFileOut = `${workdir}${baseDirectory}`;
workdir = `${workdir}${baseDirectory}`;
let { workdir, buildId, baseDirectory, secrets, pullmergeRequestId, dockerFileLocation } = data;
const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
data.workdir = `${workdir}${baseDirectory}`;
const DockerfileRaw = await fs.readFile(`${file}`, 'utf8');
const Dockerfile: Array<string> = DockerfileRaw.toString().trim().split('\n');
Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
}
const Dockerfile: Array<string> = (await fs.readFile(`${file}`, 'utf8'))
.toString()
.trim()
.split('\n');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
// TODO: fix secrets
if (
(pullmergeRequestId && secret.isPRMRSecret) ||
(!pullmergeRequestId && !secret.isPRMRSecret)
) {
Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
}
});
}
});
if (secrets.length > 0) {
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, env);
}
});
}
await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n'));
await buildImage(data);
} catch (error) {
throw error;
});
}
await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
await buildImage(data);
}

View File

@@ -8,10 +8,11 @@ const createDockerfile = async (data, imageforBuild): Promise<void> => {
Dockerfile.push(`FROM ${imageforBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,39 +1,17 @@
import { executeDockerCmd, prisma } from "../common"
import { executeCommand } from "../common"
import { saveBuildLog } from "./common";
export default async function (data: any): Promise<void> {
const { buildId, applicationId, tag, dockerId, debug, workdir } = data
const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
try {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
const { stdout } = await executeDockerCmd({
await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
await executeCommand({
buildId,
debug,
dockerId,
command: `pack build -p ${workdir} ${applicationId}:${tag} --builder heroku/buildpacks:20`
command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
})
if (debug) {
const array = stdout.split('\n')
for (const line of array) {
if (line !== '\n') {
await saveBuildLog({
line: `${line.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
} catch (error) {
const array = error.stdout.split('\n')
for (const line of array) {
if (line !== '\n') {
await saveBuildLog({
line: `${line.replace('\n', '')}`,
buildId,
applicationId
});
}
}
throw error;
}
}

View File

@@ -16,6 +16,7 @@ import python from './python';
import deno from './deno';
import laravel from './laravel';
import heroku from './heroku';
import compose from './compose'
export {
node,
@@ -35,5 +36,6 @@ export {
python,
deno,
laravel,
heroku
heroku,
compose
};

View File

@@ -1,12 +1,18 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageForLaravel, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { workdir, applicationId, tag, buildId, port } = data;
const { workdir, applicationId, tag, buildId, port, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
Dockerfile.push(`COPY --chown=application:application composer.* ./`);
@@ -24,6 +30,7 @@ const createDockerfile = async (data, image): Promise<void> => {
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
);
Dockerfile.push(`COPY --chown=application:application . ./`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -2,7 +2,7 @@ import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
const { buildId, applicationId, tag, port, startCommand, workdir, publishDirectory } = data;
const Dockerfile: Array<string> = [];
const isPnpm = startCommand.includes('pnpm');
@@ -12,8 +12,8 @@ const createDockerfile = async (data, image): Promise<void> => {
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
@@ -24,21 +25,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (isPnpm) {
@@ -48,13 +36,15 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
} else if (deploymentType === 'static') {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE 80`);
}

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
@@ -20,21 +21,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (isPnpm) {
@@ -46,6 +34,7 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`RUN ${buildCommand}`);
}
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`CMD ${startCommand}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
@@ -24,21 +25,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (isPnpm) {
@@ -48,13 +36,15 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
} else if (deploymentType === 'static') {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE 80`);
}

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';
const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
@@ -13,21 +14,8 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
Dockerfile.push('WORKDIR /app');
@@ -40,6 +28,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
}
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
@@ -18,21 +19,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
@@ -64,7 +52,7 @@ const createDockerfile = async (data, image): Promise<void> => {
} else {
Dockerfile.push(`CMD python ${pythonModule}`);
}
Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -8,10 +8,11 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,6 +1,6 @@
import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
import { asyncExecShell } from '../common';
import { executeCommand } from '../common';
import { buildCacheImageWithCargo, buildImage } from './common';
const createDockerfile = async (data, image, name): Promise<void> => {
@@ -20,6 +20,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
);
Dockerfile.push(`RUN update-ca-certificates`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ["/app/${name}"]`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
@@ -28,7 +29,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
export default async function (data) {
try {
const { workdir, baseImage, baseBuildImage } = data;
const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
const parsedToml: any = TOML.parse(cargoToml);
const name = parsedToml.package.name;
await buildCacheImageWithCargo(data, baseBuildImage);

View File

@@ -1,4 +1,5 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
@@ -18,34 +19,26 @@ const createDockerfile = async (data, image): Promise<void> => {
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
if (baseImage?.includes('httpd')) {
Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
} else {
Dockerfile.push('WORKDIR /app');
}
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
if (buildCommand) {
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
} else {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
}
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -8,10 +8,11 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -8,10 +8,11 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
import { executeDockerCmd } from './common';
import { executeCommand } from './common';
export function formatLabelsOnDocker(data) {
return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
@@ -16,7 +16,7 @@ export function formatLabelsOnDocker(data) {
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
let containerFound = false;
try {
const { stdout } = await executeDockerCmd({
const { stdout } = await executeCommand({
dockerId,
command:
`docker inspect --format '{{json .State}}' ${container}`
@@ -28,27 +28,26 @@ export async function checkContainer({ dockerId, container, remove = false }: {
const isRestarting = status === 'restarting'
const isExited = status === 'exited'
if (status === 'created') {
await executeDockerCmd({
await executeCommand({
dockerId,
command:
`docker rm ${container}`
});
}
if (remove && status === 'exited') {
await executeDockerCmd({
await executeCommand({
dockerId,
command:
`docker rm ${container}`
});
}
return {
found: containerFound,
status: {
isRunning,
isRestarting,
isExited
}
};
} catch (err) {
@@ -63,7 +62,7 @@ export async function checkContainer({ dockerId, container, remove = false }: {
export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
let isExited = false;
try {
const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
const { stdout } = await executeCommand({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
if (stdout.trim() === 'exited') {
isExited = true;
}
@@ -82,10 +81,13 @@ export async function removeContainer({
dockerId: string;
}): Promise<void> {
try {
const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
const { stdout } = await executeCommand({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
if (JSON.parse(stdout).Running) {
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
await executeCommand({ dockerId, command: `docker rm ${id}` })
}
if (JSON.parse(stdout).Status === 'exited') {
await executeCommand({ dockerId, command: `docker rm ${id}` })
}
} catch (error) {
throw error;
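Across this file the old executeDockerCmd helper is renamed to executeCommand while keeping the same { dockerId, command } call shape. A hedged usage sketch, not part of the diff, combining it with the checkContainer return shape shown above:

import { executeCommand } from './common';
import { checkContainer } from './docker';

// Restart a container only when it is currently running on the given destination.
async function restartIfRunning(dockerId: string, container: string): Promise<void> {
  const { found, status } = await checkContainer({ dockerId, container });
  if (found && status?.isRunning) {
    // executeCommand runs the docker CLI against the destination identified by dockerId.
    await executeCommand({ dockerId, command: `docker restart ${container}` });
  }
}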


@@ -1,7 +1,7 @@
import jsonwebtoken from 'jsonwebtoken';
import { saveBuildLog } from '../buildPacks/common';
import { asyncExecShell, decrypt, prisma } from '../common';
import { decrypt, executeCommand, prisma } from '../common';
export default async function ({
applicationId,
@@ -9,6 +9,7 @@ export default async function ({
githubAppId,
repository,
apiUrl,
gitCommitHash,
htmlUrl,
branch,
buildId,
@@ -20,6 +21,7 @@ export default async function ({
githubAppId: string;
repository: string;
apiUrl: string;
gitCommitHash?: string;
htmlUrl: string;
branch: string;
buildId: string;
@@ -28,16 +30,24 @@ export default async function ({
}): Promise<string> {
const { default: got } = await import('got')
const url = htmlUrl.replace('https://', '').replace('http://', '');
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
if (forPublic) {
await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`,
line: `Cloning ${repository}:${branch}...`,
buildId,
applicationId
});
await asyncExecShell(
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
);
if (gitCommitHash) {
await saveBuildLog({
line: `Checking out ${gitCommitHash} commit...`,
buildId,
applicationId
});
}
await executeCommand({
command:
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
shell: true
});
} else {
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
@@ -62,17 +72,23 @@ export default async function ({
})
.json();
await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`,
line: `Cloning ${repository}:${branch}...`,
buildId,
applicationId
});
await asyncExecShell(
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
);
if (gitCommitHash) {
await saveBuildLog({
line: `Checking out ${gitCommitHash} commit...`,
buildId,
applicationId
});
}
await executeCommand({
command:
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
shell: true
});
}
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
return commit.replace('\n', '');
}
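The importer now threads an optional gitCommitHash into the clone command so a build can be pinned to a specific commit. An illustrative expansion of the public-repository template literal above (all values are made up):

const branch = 'main';
const url = 'github.com';
const repository = 'coollabsio/coolify';
const workdir = '/tmp/build-abc';
const gitCommitHash = '';

// With an empty hash the `|| ""` fallback leaves a bare `git checkout`, which is effectively a no-op,
// so unpinned builds keep cloning the branch tip as before.
const command = `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `;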


@@ -1,17 +1,19 @@
import { saveBuildLog } from "../buildPacks/common";
import { asyncExecShell } from "../common";
import { executeCommand } from "../common";
export default async function ({
applicationId,
workdir,
repodir,
htmlUrl,
gitCommitHash,
repository,
branch,
buildId,
privateSshKey,
customPort,
forPublic
forPublic,
customUser,
}: {
applicationId: string;
workdir: string;
@@ -20,34 +22,44 @@ export default async function ({
branch: string;
buildId: string;
repodir: string;
gitCommitHash: string;
privateSshKey: string;
customPort: number;
forPublic: boolean;
customUser: string;
}): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
if (!forPublic) {
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
}
await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`,
line: `Cloning ${repository}:${branch}...`,
buildId,
applicationId
});
if (gitCommitHash) {
await saveBuildLog({
line: `Checking out ${gitCommitHash} commit...`,
buildId,
applicationId
});
}
if (forPublic) {
await asyncExecShell(
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
await executeCommand({
command:
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
});
} else {
await asyncExecShell(
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
await executeCommand({
command:
`git clone -q -b ${branch} ${customUser}@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
});
}
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
return commit.replace('\n', '');
}


@@ -9,17 +9,16 @@ Bree.extend(TSBree);
const options: any = {
defaultExtension: 'js',
// logger: new Cabin(),
logger: false,
workerMessageHandler: async ({ name, message }) => {
if (name === 'deployApplication' && message?.deploying) {
if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
scheduler.workers.get('deployApplication').postMessage('cancel')
}
}
},
// logger: false,
// workerMessageHandler: async ({ name, message }) => {
// if (name === 'deployApplication' && message?.deploying) {
// if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
// scheduler.workers.get('deployApplication').postMessage('cancel')
// }
// }
// },
jobs: [
{ name: 'infrastructure' },
{ name: 'deployApplication' },
],
};


@@ -1,20 +1,47 @@
import { createDirectories, getServiceFromDB, getServiceImage, getServiceMainPort, makeLabelForServices } from "./common";
export async function defaultServiceConfigurations({ id, teamId }) {
const service = await getServiceFromDB({ id, teamId });
const { destinationDockerId, destinationDocker, type, serviceSecret } = service;
const network = destinationDockerId && destinationDocker.network;
const port = getServiceMainPort(type);
const { workdir } = await createDirectories({ repository: type, buildId: id });
const image = getServiceImage(type);
let secrets = [];
if (serviceSecret.length > 0) {
serviceSecret.forEach((secret) => {
secrets.push(`${secret.name}=${secret.value}`);
});
}
return { ...service, network, port, workdir, image, secrets }
}
import { isARM, isDev } from './common';
import fs from 'fs/promises';
export async function getTemplates() {
const templatePath = isDev ? './templates.json' : '/app/templates.json';
const open = await fs.open(templatePath, 'r');
try {
let data = await open.readFile({ encoding: 'utf-8' });
let jsonData = JSON.parse(data);
if (isARM()) {
jsonData = jsonData.filter((d) => d.arch !== 'amd64');
}
return jsonData;
} catch (error) {
return [];
} finally {
await open?.close();
}
}
const compareSemanticVersions = (a: string, b: string) => {
const a1 = a.split('.');
const b1 = b.split('.');
const len = Math.min(a1.length, b1.length);
for (let i = 0; i < len; i++) {
const a2 = +a1[i] || 0;
const b2 = +b1[i] || 0;
if (a2 !== b2) {
return a2 > b2 ? 1 : -1;
}
}
return b1.length - a1.length;
};
export async function getTags(type: string) {
try {
if (type) {
const tagsPath = isDev ? './tags.json' : '/app/tags.json';
const data = await fs.readFile(tagsPath, 'utf8');
let tags = JSON.parse(data);
if (tags) {
tags = tags.find((tag: any) => tag.name.includes(type));
tags.tags = tags.tags.sort(compareSemanticVersions).reverse();
return tags;
}
}
} catch (error) {
return [];
}
}
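compareSemanticVersions is a plain comparator over dot-separated numeric segments, so it plugs straight into Array.prototype.sort. A quick usage sketch matching how getTags consumes it:

const versions = ['1.2.10', '1.2.2', '1.10.0', '0.9.1'];
const ascending = [...versions].sort(compareSemanticVersions);
// ascending: ['0.9.1', '1.2.2', '1.2.10', '1.10.0']
// getTags() then calls .reverse(), so the newest tag ends up first in the returned list.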


@@ -1,367 +1,9 @@
import cuid from 'cuid';
import { encrypt, generatePassword, prisma } from '../common';
export const includeServices: any = {
destinationDocker: true,
persistentStorage: true,
serviceSecret: true,
minio: true,
plausibleAnalytics: true,
vscodeserver: true,
wordpress: true,
ghost: true,
meiliSearch: true,
umami: true,
hasura: true,
fider: true,
moodle: true,
appwrite: true,
glitchTip: true,
searxng: true,
weblate: true,
taiga: true
};
export async function configureServiceType({
id,
type
}: {
id: string;
type: string;
}): Promise<void> {
if (type === 'plausibleanalytics') {
const password = encrypt(generatePassword({}));
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'plausibleanalytics';
const secretKeyBase = encrypt(generatePassword({ length: 64 }));
await prisma.service.update({
where: { id },
data: {
type,
plausibleAnalytics: {
create: {
postgresqlDatabase,
postgresqlUser,
postgresqlPassword,
password,
secretKeyBase
}
}
}
});
} else if (type === 'nocodb') {
await prisma.service.update({
where: { id },
data: { type }
});
} else if (type === 'minio') {
const rootUser = cuid();
const rootUserPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: { type, minio: { create: { rootUser, rootUserPassword } } }
});
} else if (type === 'vscodeserver') {
const password = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: { type, vscodeserver: { create: { password } } }
});
} else if (type === 'wordpress') {
const mysqlUser = cuid();
const mysqlPassword = encrypt(generatePassword({}));
const mysqlRootUser = cuid();
const mysqlRootUserPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: {
type,
wordpress: { create: { mysqlPassword, mysqlRootUserPassword, mysqlRootUser, mysqlUser } }
}
});
} else if (type === 'vaultwarden') {
await prisma.service.update({
where: { id },
data: {
type
}
});
} else if (type === 'languagetool') {
await prisma.service.update({
where: { id },
data: {
type
}
});
} else if (type === 'n8n') {
await prisma.service.update({
where: { id },
data: {
type
}
});
} else if (type === 'uptimekuma') {
await prisma.service.update({
where: { id },
data: {
type
}
});
} else if (type === 'ghost') {
const defaultEmail = `${cuid()}@example.com`;
const defaultPassword = encrypt(generatePassword({}));
const mariadbUser = cuid();
const mariadbPassword = encrypt(generatePassword({}));
const mariadbRootUser = cuid();
const mariadbRootUserPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: {
type,
ghost: {
create: {
defaultEmail,
defaultPassword,
mariadbUser,
mariadbPassword,
mariadbRootUser,
mariadbRootUserPassword
}
}
}
});
} else if (type === 'meilisearch') {
const masterKey = encrypt(generatePassword({ length: 32 }));
await prisma.service.update({
where: { id },
data: {
type,
meiliSearch: { create: { masterKey } }
}
});
} else if (type === 'umami') {
const umamiAdminPassword = encrypt(generatePassword({}));
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'umami';
const hashSalt = encrypt(generatePassword({ length: 64 }));
await prisma.service.update({
where: { id },
data: {
type,
umami: {
create: {
umamiAdminPassword,
postgresqlDatabase,
postgresqlPassword,
postgresqlUser,
hashSalt
}
}
}
});
} else if (type === 'hasura') {
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'hasura';
const graphQLAdminPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: {
type,
hasura: {
create: {
postgresqlDatabase,
postgresqlPassword,
postgresqlUser,
graphQLAdminPassword
}
}
}
});
} else if (type === 'fider') {
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'fider';
const jwtSecret = encrypt(generatePassword({ length: 64, symbols: true }));
await prisma.service.update({
where: { id },
data: {
type,
fider: {
create: {
postgresqlDatabase,
postgresqlPassword,
postgresqlUser,
jwtSecret
}
}
}
});
} else if (type === 'moodle') {
const defaultUsername = cuid();
const defaultPassword = encrypt(generatePassword({}));
const defaultEmail = `${cuid()}@example.com`;
const mariadbUser = cuid();
const mariadbPassword = encrypt(generatePassword({}));
const mariadbDatabase = 'moodle_db';
const mariadbRootUser = cuid();
const mariadbRootUserPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: {
type,
moodle: {
create: {
defaultUsername,
defaultPassword,
defaultEmail,
mariadbUser,
mariadbPassword,
mariadbDatabase,
mariadbRootUser,
mariadbRootUserPassword
}
}
}
});
} else if (type === 'appwrite') {
const opensslKeyV1 = encrypt(generatePassword({}));
const executorSecret = encrypt(generatePassword({}));
const redisPassword = encrypt(generatePassword({}));
const mariadbHost = `${id}-mariadb`
const mariadbUser = cuid();
const mariadbPassword = encrypt(generatePassword({}));
const mariadbDatabase = 'appwrite';
const mariadbRootUser = cuid();
const mariadbRootUserPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: {
type,
appwrite: {
create: {
opensslKeyV1,
executorSecret,
redisPassword,
mariadbHost,
mariadbUser,
mariadbPassword,
mariadbDatabase,
mariadbRootUser,
mariadbRootUserPassword
}
}
}
});
} else if (type === 'glitchTip') {
const defaultUsername = cuid();
const defaultEmail = `${defaultUsername}@example.com`;
const defaultPassword = encrypt(generatePassword({}));
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'glitchTip';
const secretKeyBase = encrypt(generatePassword({ length: 64 }));
await prisma.service.update({
where: { id },
data: {
type,
glitchTip: {
create: {
postgresqlDatabase,
postgresqlUser,
postgresqlPassword,
secretKeyBase,
defaultEmail,
defaultUsername,
defaultPassword,
}
}
}
});
} else if (type === 'searxng') {
const secretKey = encrypt(generatePassword({ length: 32, isHex: true }))
const redisPassword = encrypt(generatePassword({}));
await prisma.service.update({
where: { id },
data: {
type,
searxng: {
create: {
secretKey,
redisPassword,
}
}
}
});
} else if (type === 'weblate') {
const adminPassword = encrypt(generatePassword({}))
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'weblate';
await prisma.service.update({
where: { id },
data: {
type,
weblate: {
create: {
adminPassword,
postgresqlHost: `${id}-postgresql`,
postgresqlPort: 5432,
postgresqlUser,
postgresqlPassword,
postgresqlDatabase,
}
}
}
});
} else if (type === 'taiga') {
const secretKey = encrypt(generatePassword({}))
const erlangSecret = encrypt(generatePassword({}))
const rabbitMQUser = cuid();
const djangoAdminUser = cuid();
const djangoAdminPassword = encrypt(generatePassword({}))
const rabbitMQPassword = encrypt(generatePassword({}))
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'taiga';
await prisma.service.update({
where: { id },
data: {
type,
taiga: {
create: {
secretKey,
erlangSecret,
djangoAdminUser,
djangoAdminPassword,
rabbitMQUser,
rabbitMQPassword,
postgresqlHost: `${id}-postgresql`,
postgresqlPort: 5432,
postgresqlUser,
postgresqlPassword,
postgresqlDatabase,
}
}
}
});
} else {
await prisma.service.update({
where: { id },
data: {
type
}
});
}
}
import { decrypt, prisma } from '../common';
export async function removeService({ id }: { id: string }): Promise<void> {
await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
await prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
await prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
await prisma.fider.deleteMany({ where: { serviceId: id } });
@@ -378,6 +20,20 @@ export async function removeService({ id }: { id: string }): Promise<void> {
await prisma.searxng.deleteMany({ where: { serviceId: id } });
await prisma.weblate.deleteMany({ where: { serviceId: id } });
await prisma.taiga.deleteMany({ where: { serviceId: id } });
await prisma.service.delete({ where: { id } });
}
export async function verifyAndDecryptServiceSecrets(id: string) {
const secrets = await prisma.serviceSecret.findMany({ where: { serviceId: id } })
let decryptedSecrets = secrets.map(secret => {
const { name, value } = secret
if (value) {
let rawValue = decrypt(value)
rawValue = rawValue.replaceAll(/\$/gi, '$$$')
return { name, value: rawValue }
}
return { name, value }
})
return decryptedSecrets
}
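The dollar-sign doubling in verifyAndDecryptServiceSecrets exists because docker-compose treats $VAR as interpolation and $$ as the escape for a literal dollar sign. A small illustration of the round trip:

const raw = 'pa$$word';                        // decrypted secret value
const escaped = raw.replaceAll(/\$/gi, '$$$'); // -> 'pa$$$$word' (every matched $ becomes $$)
// Written into a compose file as PASSWORD=pa$$$$word, the container receives pa$$word unchanged.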

File diff suppressed because it is too large.


@@ -624,7 +624,7 @@ export const glitchTip = [{
isEncrypted: false
},
{
name: 'emailSmtpUseSsl',
name: 'emailSmtpUseTls',
isEditable: true,
isLowerCase: false,
isNumber: false,


@@ -1,236 +0,0 @@
/*
Example of a supported version:
{
// Name used to identify the service internally
name: 'umami',
// Fancier name to show to the user
fancyName: 'Umami',
// Docker base image for the service
baseImage: 'ghcr.io/mikecao/umami',
// Optional: If there is any dependent image, you should list it here
images: [],
// Usable tags
versions: ['postgresql-latest'],
// Which tag is the recommended
recommendedVersion: 'postgresql-latest',
// Application's default port, Umami listens on 3000
ports: {
main: 3000
}
}
*/
export const supportedServiceTypesAndVersions = [
{
name: 'plausibleanalytics',
fancyName: 'Plausible Analytics',
baseImage: 'plausible/analytics',
images: ['bitnami/postgresql:13.2.0', 'yandex/clickhouse-server:21.3.2.5'],
versions: ['latest', 'stable'],
recommendedVersion: 'stable',
ports: {
main: 8000
}
},
{
name: 'nocodb',
fancyName: 'NocoDB',
baseImage: 'nocodb/nocodb',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8080
}
},
{
name: 'minio',
fancyName: 'MinIO',
baseImage: 'minio/minio',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 9001
}
},
{
name: 'vscodeserver',
fancyName: 'VSCode Server',
baseImage: 'codercom/code-server',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8080
}
},
{
name: 'wordpress',
fancyName: 'Wordpress',
baseImage: 'wordpress',
images: ['bitnami/mysql:5.7'],
versions: ['latest', 'php8.1', 'php8.0', 'php7.4', 'php7.3'],
recommendedVersion: 'latest',
ports: {
main: 80
}
},
{
name: 'vaultwarden',
fancyName: 'Vaultwarden',
baseImage: 'vaultwarden/server',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 80
}
},
{
name: 'languagetool',
fancyName: 'LanguageTool',
baseImage: 'silviof/docker-languagetool',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8010
}
},
{
name: 'n8n',
fancyName: 'n8n',
baseImage: 'n8nio/n8n',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 5678
}
},
{
name: 'uptimekuma',
fancyName: 'Uptime Kuma',
baseImage: 'louislam/uptime-kuma',
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 3001
}
},
{
name: 'ghost',
fancyName: 'Ghost',
baseImage: 'bitnami/ghost',
images: ['bitnami/mariadb'],
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 2368
}
},
{
name: 'meilisearch',
fancyName: 'Meilisearch',
baseImage: 'getmeili/meilisearch',
images: [],
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 7700
}
},
{
name: 'umami',
fancyName: 'Umami',
baseImage: 'ghcr.io/umami-software/umami',
images: ['postgres:12-alpine'],
versions: ['postgresql-latest'],
recommendedVersion: 'postgresql-latest',
ports: {
main: 3000
}
},
{
name: 'hasura',
fancyName: 'Hasura',
baseImage: 'hasura/graphql-engine',
images: ['postgres:12-alpine'],
versions: ['latest', 'v2.10.0', 'v2.5.1'],
recommendedVersion: 'v2.10.0',
ports: {
main: 8080
}
},
{
name: 'fider',
fancyName: 'Fider',
baseImage: 'getfider/fider',
images: ['postgres:12-alpine'],
versions: ['stable'],
recommendedVersion: 'stable',
ports: {
main: 3000
}
},
{
name: 'appwrite',
fancyName: 'Appwrite',
baseImage: 'appwrite/appwrite',
images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'],
versions: ['latest', '1.0','0.15.3'],
recommendedVersion: '0.15.3',
ports: {
main: 80
}
},
// {
// name: 'moodle',
// fancyName: 'Moodle',
// baseImage: 'bitnami/moodle',
// images: [],
// versions: ['latest', 'v4.0.2'],
// recommendedVersion: 'latest',
// ports: {
// main: 8080
// }
// }
{
name: 'glitchTip',
fancyName: 'GlitchTip',
baseImage: 'glitchtip/glitchtip',
images: ['postgres:14-alpine', 'redis:7-alpine'],
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8000
}
},
{
name: 'searxng',
fancyName: 'SearXNG',
baseImage: 'searxng/searxng',
images: [],
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8080
}
},
{
name: 'weblate',
fancyName: 'Weblate',
baseImage: 'weblate/weblate',
images: ['postgres:14-alpine', 'redis:6-alpine'],
versions: ['latest'],
recommendedVersion: 'latest',
ports: {
main: 8080
}
},
// {
// name: 'taiga',
// fancyName: 'Taiga',
// baseImage: 'taigaio/taiga-front',
// images: ['postgres:12.3', 'rabbitmq:3.8-management-alpine', 'taigaio/taiga-back', 'taigaio/taiga-events', 'taigaio/taiga-protected'],
// versions: ['latest'],
// recommendedVersion: 'latest',
// ports: {
// main: 80
// }
// },
];


@@ -1,33 +1,37 @@
import fp from 'fastify-plugin'
import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt'
import fp from 'fastify-plugin';
import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt';
declare module "@fastify/jwt" {
interface FastifyJWT {
user: {
userId: string,
teamId: string,
permission: string,
isAdmin: boolean
}
}
declare module '@fastify/jwt' {
interface FastifyJWT {
user: {
userId: string;
teamId: string;
permission: string;
isAdmin: boolean;
};
}
}
export default fp<FastifyJWTOptions>(async (fastify, opts) => {
fastify.register(fastifyJwt, {
secret: fastify.config.COOLIFY_SECRET_KEY
})
let secretKey = fastify.config.COOLIFY_SECRET_KEY_BETTER;
if (!secretKey) {
secretKey = fastify.config.COOLIFY_SECRET_KEY;
}
fastify.register(fastifyJwt, {
secret: secretKey
});
fastify.decorate("authenticate", async function (request, reply) {
try {
await request.jwtVerify()
} catch (err) {
reply.send(err)
}
})
})
fastify.decorate('authenticate', async function (request, reply) {
try {
await request.jwtVerify();
} catch (err) {
reply.send(err);
}
});
});
declare module 'fastify' {
export interface FastifyInstance {
authenticate(): Promise<void>
}
export interface FastifyInstance {
authenticate(): Promise<void>;
}
}
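Beyond the formatting pass, the plugin now prefers COOLIFY_SECRET_KEY_BETTER and falls back to COOLIFY_SECRET_KEY when the newer variable is unset. A hedged sketch of how the decorated authenticate() hook is typically consumed by a route plugin (the route path is illustrative):

export default async function protectedRoutes(fastify) {
  fastify.get('/protected', { onRequest: [fastify.authenticate] }, async (request) => {
    // request.user is typed through the FastifyJWT module augmentation above.
    return { teamId: request.user.teamId, isAdmin: request.user.isAdmin };
  });
}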


@@ -0,0 +1,29 @@
export default async (fastify) => {
fastify.io.use((socket, next) => {
const { token } = socket.handshake.auth;
if (token && fastify.jwt.verify(token)) {
next();
} else {
return next(new Error("unauthorized event"));
}
});
fastify.io.on('connection', (socket: any) => {
const { token } = socket.handshake.auth;
const { teamId } = fastify.jwt.decode(token);
socket.join(teamId);
// console.info('Socket connected!', socket.id)
// console.info('Socket joined team!', teamId)
// socket.on('message', (message) => {
// console.log(message)
// })
// socket.on('error', (err) => {
// console.log(err)
// })
})
// fastify.io.on("error", (err) => {
// if (err && err.message === "unauthorized event") {
// fastify.io.disconnect();
// }
// });
}
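The middleware above rejects sockets that do not present a verifiable JWT in the handshake. A hedged client-side counterpart, assuming the standard socket.io-client API (URL and token storage are illustrative):

import { io } from 'socket.io-client';

const jwtToken = '<jwt issued at login>';
const socket = io('https://coolify.example.com', {
  auth: { token: jwtToken }
});

socket.on('connect_error', (err) => {
  // next(new Error("unauthorized event")) on the server surfaces here as a connect_error.
  console.error(err.message);
});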

File diff suppressed because it is too large.


@@ -1,8 +1,8 @@
import { FastifyPluginAsync } from 'fastify';
import { OnlyId } from '../../../../types';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers';
import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getDockerImages, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRegistry, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartApplication, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => {
@@ -11,6 +11,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async (request) => await listApplications(request));
fastify.post<GetImages>('/images', async (request) => await getImages(request));
fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredApplications(request));
fastify.post('/new', async (request, reply) => await newApplication(request, reply));
fastify.get<OnlyId>('/:id', async (request) => await getApplication(request));
@@ -19,7 +21,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request));
fastify.post<OnlyId>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
fastify.post<RestartApplication>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply));
@@ -30,6 +32,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/:id/secrets', async (request) => await getSecrets(request));
fastify.post<SaveSecret>('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
fastify.put<SaveSecret>('/:id/secrets', async (request, reply) => await updateSecret(request, reply));
fastify.put<SaveSecret>('/:id/secrets/preview', async (request, reply) => await updatePreviewSecret(request, reply));
fastify.delete<DeleteSecret>('/:id/secrets', async (request) => await deleteSecret(request));
fastify.get<OnlyId>('/:id/storages', async (request) => await getStorages(request));
@@ -41,11 +45,14 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));
fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request));
fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
fastify.get('/:id/usage', async (request) => await getUsage(request))
fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request))
fastify.get('/:id/images', async (request) => await getDockerImages(request))
fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));
@@ -58,6 +65,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));
fastify.post('/:id/configuration/registry', async (request, reply) => await saveRegistry(request, reply));
fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply));
fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
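The restart route now accepts a typed body (see RestartApplication in the types hunk further down), so the caller can ask for a specific image to be redeployed. A hedged sketch of a call to it; the /api/v1 prefix and the meaning of a null imageId are assumptions, not taken from the diff:

async function restartApplication(applicationId: string, token: string): Promise<void> {
  await fetch(`/api/v1/applications/${applicationId}/restart`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    // Assumed: null falls back to restarting the currently deployed image.
    body: JSON.stringify({ imageId: null })
  });
}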


@@ -1,136 +1,174 @@
import type { OnlyId } from "../../../../types";
import type { OnlyId } from '../../../../types';
export interface SaveApplication extends OnlyId {
Body: {
name: string,
buildPack: string,
fqdn: string,
port: number,
exposePort: number,
installCommand: string,
buildCommand: string,
startCommand: string,
baseDirectory: string,
publishDirectory: string,
pythonWSGI: string,
pythonModule: string,
pythonVariable: string,
dockerFileLocation: string,
denoMainFile: string,
denoOptions: string,
baseImage: string,
baseBuildImage: string,
deploymentType: string,
baseDatabaseBranch: string
}
Body: {
name: string;
buildPack: string;
fqdn: string;
port: number;
exposePort: number;
installCommand: string;
buildCommand: string;
startCommand: string;
baseDirectory: string;
publishDirectory: string;
pythonWSGI: string;
pythonModule: string;
pythonVariable: string;
dockerFileLocation: string;
denoMainFile: string;
denoOptions: string;
baseImage: string;
gitCommitHash: string;
baseBuildImage: string;
deploymentType: string;
baseDatabaseBranch: string;
dockerComposeFile: string;
dockerComposeFileLocation: string;
dockerComposeConfiguration: string;
simpleDockerfile: string;
dockerRegistryImageName: string;
basicAuthPw: string;
basicAuthUser: string;
};
}
export interface SaveApplicationSettings extends OnlyId {
Querystring: { domain: string; };
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean };
Querystring: { domain: string };
Body: {
debug: boolean;
previews: boolean;
dualCerts: boolean;
autodeploy: boolean;
branch: string;
projectId: number;
isBot: boolean;
isDBBranching: boolean;
isCustomSSL: boolean;
isHttp2: boolean;
basicAuth: boolean;
};
}
export interface DeleteApplication extends OnlyId {
Querystring: { domain: string; };
Body: { force: boolean }
Querystring: { domain: string };
Body: { force: boolean };
}
export interface CheckDomain extends OnlyId {
Querystring: { domain: string; };
Querystring: { domain: string };
}
export interface CheckDNS extends OnlyId {
Querystring: { domain: string; };
Body: {
exposePort: number,
fqdn: string,
forceSave: boolean,
dualCerts: boolean
}
Querystring: { domain: string };
Body: {
exposePort: number;
fqdn: string;
forceSave: boolean;
dualCerts: boolean;
};
}
export interface DeployApplication {
Querystring: { domain: string }
Body: { pullmergeRequestId: string | null, branch: string, forceRebuild?: boolean }
Querystring: { domain: string };
Body: { pullmergeRequestId: string | null; branch: string; forceRebuild?: boolean };
}
export interface GetImages {
Body: { buildPack: string, deploymentType: string }
Body: { buildPack: string; deploymentType: string };
}
export interface SaveApplicationSource extends OnlyId {
Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string }
Body: {
gitSourceId?: string | null;
forPublic?: boolean;
type?: string;
simpleDockerfile?: string;
};
}
export interface CheckRepository extends OnlyId {
Querystring: { repository: string, branch: string }
Querystring: { repository: string; branch: string };
}
export interface SaveDestination extends OnlyId {
Body: { destinationId: string }
Body: { destinationId: string };
}
export interface SaveSecret extends OnlyId {
Body: {
name: string,
value: string,
isBuildSecret: boolean,
isPRMRSecret: boolean,
isNew: boolean
}
Body: {
name: string;
value: string;
isBuildSecret: boolean;
previewSecret: boolean;
isNew: boolean;
};
}
export interface DeleteSecret extends OnlyId {
Body: { name: string }
Body: { name: string };
}
export interface SaveStorage extends OnlyId {
Body: {
path: string,
newStorage: boolean,
storageId: string
}
Body: {
hostPath?: string;
path: string;
newStorage: boolean;
storageId: string;
};
}
export interface DeleteStorage extends OnlyId {
Body: {
path: string,
}
Body: {
path: string;
};
}
export interface GetApplicationLogs extends OnlyId {
Querystring: {
since: number,
}
export interface GetApplicationLogs {
Params: {
id: string;
containerId: string;
};
Querystring: {
since: number;
};
}
export interface GetBuilds extends OnlyId {
Querystring: {
buildId: string
skip: number,
}
Querystring: {
buildId: string;
skip: number;
};
}
export interface GetBuildIdLogs {
Params: {
id: string,
buildId: string
},
Querystring: {
sequence: number
}
Params: {
id: string;
buildId: string;
};
Querystring: {
sequence: number;
};
}
export interface SaveDeployKey extends OnlyId {
Body: {
deployKeyId: number
}
Body: {
deployKeyId: number;
};
}
export interface CancelDeployment {
Body: {
buildId: string,
applicationId: string
}
Body: {
buildId: string;
applicationId: string;
};
}
export interface DeployApplication extends OnlyId {
Body: {
pullmergeRequestId: string | null,
branch: string,
forceRebuild?: boolean
}
Body: {
pullmergeRequestId: string | null;
branch: string;
forceRebuild?: boolean;
};
}
export interface StopPreviewApplication extends OnlyId {
Body: {
pullmergeRequestId: string | null,
}
Body: {
pullmergeRequestId: string | null;
};
}
export interface RestartPreviewApplication {
Params: {
id: string,
pullmergeRequestId: string | null,
}
}
Params: {
id: string;
pullmergeRequestId: string | null;
};
}
export interface RestartApplication {
Params: {
id: string;
};
Body: {
imageId: string | null;
};
}


@@ -1,23 +1,31 @@
import { FastifyPluginAsync } from 'fastify';
import { errorHandler, listSettings, version } from '../../../../lib/common';
import { errorHandler, isARM, listSettings, version } from '../../../../lib/common';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async () => {
const settings = await listSettings()
try {
return {
ipv4: settings.ipv4,
ipv6: settings.ipv6,
version,
whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
isRegistrationEnabled: settings.isRegistrationEnabled,
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
});
fastify.addHook('onRequest', async (request) => {
try {
await request.jwtVerify();
} catch (error) {
return;
}
});
fastify.get('/', async (request) => {
const teamId = request.user?.teamId;
const settings = await listSettings();
try {
return {
ipv4: teamId ? settings.ipv4 : null,
ipv6: teamId ? settings.ipv6 : null,
version,
whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
isRegistrationEnabled: settings.isRegistrationEnabled,
isARM: isARM()
};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
});
};
export default root;

File diff suppressed because it is too large.


@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify';
import { deleteDatabase, deleteDatabaseSecret, getDatabase, getDatabaseLogs, getDatabaseSecrets, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSecret, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
import { backupDatabase, cleanupUnconfiguredDatabases, deleteDatabase, deleteDatabaseSecret, getDatabase, getDatabaseLogs, getDatabaseSecrets, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSecret, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
import type { OnlyId } from '../../../../types';
@@ -12,6 +12,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async (request) => await listDatabases(request));
fastify.post('/new', async (request, reply) => await newDatabase(request, reply));
fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredDatabases(request));
fastify.get<OnlyId>('/:id', async (request) => await getDatabase(request));
fastify.post<SaveDatabase>('/:id', async (request, reply) => await saveDatabase(request, reply));
fastify.delete<DeleteDatabase>('/:id', async (request) => await deleteDatabase(request));
@@ -37,6 +39,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.post<OnlyId>('/:id/start', async (request) => await startDatabase(request));
fastify.post<OnlyId>('/:id/stop', async (request) => await stopDatabase(request));
fastify.post<OnlyId>('/:id/backup', async (request, reply) => await backupDatabase(request, reply));
};
export default root;


@@ -4,7 +4,7 @@ export interface SaveDatabaseType extends OnlyId {
Body: { type: string }
}
export interface DeleteDatabase extends OnlyId {
Body: { force: string }
Body: { }
}
export interface SaveVersion extends OnlyId {
Body: {


@@ -1,239 +1,384 @@
import type { FastifyRequest } from 'fastify';
import { FastifyReply } from 'fastify';
import sshConfig from 'ssh-config'
import fs from 'fs/promises'
import os from 'os';
import { asyncExecShell, createRemoteEngineConfiguration, decrypt, errorHandler, executeDockerCmd, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
import {
errorHandler,
executeCommand,
listSettings,
prisma,
startTraefikProxy,
stopTraefikProxy
} from '../../../../lib/common';
import { checkContainer } from '../../../../lib/docker';
import type { OnlyId } from '../../../../types';
import type { CheckDestination, ListDestinations, NewDestination, Proxy, SaveDestinationSettings } from './types';
import type {
CheckDestination,
ListDestinations,
NewDestination,
Proxy,
SaveDestinationSettings
} from './types';
import { removeService } from '../../../../lib/services/common';
export async function listDestinations(request: FastifyRequest<ListDestinations>) {
try {
const teamId = request.user.teamId;
const { onlyVerified = false } = request.query
let destinations = []
if (teamId === '0') {
destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
} else {
destinations = await prisma.destinationDocker.findMany({
where: { teams: { some: { id: teamId } } },
include: { teams: true }
});
}
if (onlyVerified) {
destinations = destinations.filter(destination => destination.engine || (destination.remoteEngine && destination.remoteVerified))
}
return {
destinations
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
try {
const teamId = request.user.teamId;
const { onlyVerified = false } = request.query;
let destinations = [];
if (teamId === '0') {
destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
} else {
destinations = await prisma.destinationDocker.findMany({
where: { teams: { some: { id: teamId } } },
include: { teams: true }
});
}
if (onlyVerified) {
destinations = destinations.filter(
(destination) =>
destination.engine || (destination.remoteEngine && destination.remoteVerified)
);
}
return {
destinations
};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function checkDestination(request: FastifyRequest<CheckDestination>) {
try {
const { network } = request.body;
const found = await prisma.destinationDocker.findFirst({ where: { network } });
if (found) {
throw {
message: `Network already exists: ${network}`
};
}
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
try {
const { network } = request.body;
const found = await prisma.destinationDocker.findFirst({ where: { network } });
if (found) {
throw {
message: `Network already exists: ${network}`
};
}
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function getDestination(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const teamId = request.user?.teamId;
const destination = await prisma.destinationDocker.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { sshKey: true, application: true, service: true, database: true }
});
if (!destination && id !== 'new') {
throw { status: 404, message: `Destination not found.` };
}
const settings = await listSettings();
const payload = {
destination,
settings
};
return {
...payload
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
try {
const { id } = request.params;
const teamId = request.user?.teamId;
const destination = await prisma.destinationDocker.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { sshKey: true, application: true, service: true, database: true }
});
if (!destination && id !== 'new') {
throw { status: 404, message: `Destination not found.` };
}
const settings = await listSettings();
const payload = {
destination,
settings
};
return {
...payload
};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function newDestination(request: FastifyRequest<NewDestination>, reply: FastifyReply) {
try {
const teamId = request.user.teamId;
const { id } = request.params
try {
const teamId = request.user.teamId;
const { id } = request.params;
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
if (id === 'new') {
if (engine) {
const { stdout } = await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network ls --filter 'name=^${network}$' --format '{{json .}}'`);
if (stdout === '') {
await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network create --attachable ${network}`);
}
await prisma.destinationDocker.create({
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
});
const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
const destination = destinations.find((destination) => destination.network === network);
if (destinations.length > 0) {
const proxyConfigured = destinations.find(
(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
);
if (proxyConfigured) {
isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
}
await prisma.destinationDocker.updateMany({ where: { engine }, data: { isCoolifyProxyUsed } });
}
if (isCoolifyProxyUsed) {
await startTraefikProxy(destination.id);
}
return reply.code(201).send({ id: destination.id });
} else {
const destination = await prisma.destinationDocker.create({
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort }
});
return reply.code(201).send({ id: destination.id })
}
} else {
await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
return reply.code(201).send();
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } =
request.body;
if (id === 'new') {
if (engine) {
const { stdout } = await executeCommand({
command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'`
});
if (stdout === '') {
await executeCommand({ command: `docker network create --attachable ${network}` });
}
await prisma.destinationDocker.create({
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
});
const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
const destination = destinations.find((destination) => destination.network === network);
if (destinations.length > 0) {
const proxyConfigured = destinations.find(
(destination) =>
destination.network !== network && destination.isCoolifyProxyUsed === true
);
if (proxyConfigured) {
isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
}
await prisma.destinationDocker.updateMany({
where: { engine },
data: { isCoolifyProxyUsed }
});
}
if (isCoolifyProxyUsed) {
await startTraefikProxy(destination.id);
}
return reply.code(201).send({ id: destination.id });
} else {
const destination = await prisma.destinationDocker.create({
data: {
name,
teams: { connect: { id: teamId } },
engine,
network,
isCoolifyProxyUsed,
remoteEngine: true,
remoteIpAddress,
remoteUser,
remotePort: Number(remotePort)
}
});
return reply.code(201).send({ id: destination.id });
}
} else {
await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
return reply.code(201).send();
}
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function forceDeleteDestination(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params;
const services = await prisma.service.findMany({ where: { destinationDockerId: id } });
for (const service of services) {
await removeService({ id: service.id });
}
const applications = await prisma.application.findMany({ where: { destinationDockerId: id } });
for (const application of applications) {
await prisma.applicationSettings.deleteMany({ where: { application: { id: application.id } } });
await prisma.buildLog.deleteMany({ where: { applicationId: application.id } });
await prisma.build.deleteMany({ where: { applicationId: application.id } });
await prisma.secret.deleteMany({ where: { applicationId: application.id } });
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: application.id } });
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: application.id } });
await prisma.previewApplication.deleteMany({ where: { applicationId: application.id } });
}
const databases = await prisma.database.findMany({ where: { destinationDockerId: id } });
for (const database of databases) {
await prisma.databaseSettings.deleteMany({ where: { databaseId: database.id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: database.id } });
await prisma.database.delete({ where: { id: database.id } });
}
await prisma.destinationDocker.delete({ where: { id } });
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function deleteDestination(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
if (isCoolifyProxyUsed) {
if (engine || remoteVerified) {
const { stdout: found } = await executeDockerCmd({
dockerId: id,
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
})
if (found) {
await executeDockerCmd({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
await executeDockerCmd({ dockerId: id, command: `docker network rm ${network}` })
}
}
}
await prisma.destinationDocker.delete({ where: { id } });
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
try {
const { id } = request.params;
const appFound = await prisma.application.findFirst({ where: { destinationDockerId: id } });
const serviceFound = await prisma.service.findFirst({ where: { destinationDockerId: id } });
const databaseFound = await prisma.database.findFirst({ where: { destinationDockerId: id } });
if (appFound || serviceFound || databaseFound) {
throw {
message: `Destination is in use.<br>Remove all applications, services and databases using this destination first.`
};
}
const { network, remoteVerified, engine, isCoolifyProxyUsed } =
await prisma.destinationDocker.findUnique({ where: { id } });
if (isCoolifyProxyUsed) {
if (engine || remoteVerified) {
const { stdout: found } = await executeCommand({
dockerId: id,
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
});
if (found) {
await executeCommand({
dockerId: id,
command: `docker network disconnect ${network} coolify-proxy`
});
await executeCommand({ dockerId: id, command: `docker network rm ${network}` });
}
}
}
await prisma.destinationDocker.delete({ where: { id } });
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function saveDestinationSettings(request: FastifyRequest<SaveDestinationSettings>) {
try {
const { engine, isCoolifyProxyUsed } = request.body;
await prisma.destinationDocker.updateMany({
where: { engine },
data: { isCoolifyProxyUsed }
});
try {
const { engine, isCoolifyProxyUsed } = request.body;
await prisma.destinationDocker.updateMany({
where: { engine },
data: { isCoolifyProxyUsed }
});
return {
status: 202
}
// return reply.code(201).send();
} catch ({ status, message }) {
return errorHandler({ status, message })
}
return {
status: 202
};
// return reply.code(201).send();
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function startProxy(request: FastifyRequest<Proxy>) {
const { id } = request.params
try {
await startTraefikProxy(id);
return {}
} catch ({ status, message }) {
await stopTraefikProxy(id);
return errorHandler({ status, message })
}
const { id } = request.params;
try {
await startTraefikProxy(id);
return {};
} catch ({ status, message }) {
await stopTraefikProxy(id);
return errorHandler({ status, message });
}
}
export async function stopProxy(request: FastifyRequest<Proxy>) {
const { id } = request.params
try {
await stopTraefikProxy(id);
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
const { id } = request.params;
try {
await stopTraefikProxy(id);
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function restartProxy(request: FastifyRequest<Proxy>) {
const { id } = request.params
try {
await stopTraefikProxy(id);
await startTraefikProxy(id);
await prisma.destinationDocker.update({
where: { id },
data: { isCoolifyProxyUsed: true }
});
return {}
} catch ({ status, message }) {
await prisma.destinationDocker.update({
where: { id },
data: { isCoolifyProxyUsed: false }
});
return errorHandler({ status, message })
}
const { id } = request.params;
try {
await stopTraefikProxy(id);
await startTraefikProxy(id);
await prisma.destinationDocker.update({
where: { id },
data: { isCoolifyProxyUsed: true }
});
return {};
} catch ({ status, message }) {
await prisma.destinationDocker.update({
where: { id },
data: { isCoolifyProxyUsed: false }
});
return errorHandler({ status, message });
}
}
export async function assignSSHKey(request: FastifyRequest) {
try {
const { id: sshKeyId } = request.body;
const { id } = request.params;
await prisma.destinationDocker.update({ where: { id }, data: { sshKey: { connect: { id: sshKeyId } } } })
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
try {
const { id: sshKeyId } = request.body;
const { id } = request.params;
await prisma.destinationDocker.update({
where: { id },
data: { sshKey: { connect: { id: sshKeyId } } }
});
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const { id } = request.params;
await createRemoteEngineConfiguration(id);
const { remoteIpAddress, remoteUser, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
const host = `ssh://${remoteUser}@${remoteIpAddress}`
const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
if (!stdout) {
await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
}
const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
if (!coolifyNetwork) {
await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
}
if (isCoolifyProxyUsed) await startTraefikProxy(id);
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
return reply.code(201).send()
export async function verifyRemoteDockerEngineFn(id: string) {
const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst(
{ where: { id } }
);
const daemonJson = `daemon-${id}.json`;
try {
await executeCommand({
sshCommand: true,
command: `docker network inspect ${network}`,
dockerId: id
});
} catch (error) {
await executeCommand({
command: `docker network create --attachable ${network}`,
dockerId: id
});
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
try {
await executeCommand({
sshCommand: true,
command: `docker network inspect coolify-infra`,
dockerId: id
});
} catch (error) {
await executeCommand({
command: `docker network create --attachable coolify-infra`,
dockerId: id
});
}
if (isCoolifyProxyUsed) await startTraefikProxy(id);
let isUpdated = false;
let daemonJsonParsed = {
'live-restore': true,
features: {
buildkit: true
}
};
try {
const { stdout: daemonJson } = await executeCommand({
sshCommand: true,
dockerId: id,
command: `cat /etc/docker/daemon.json`
});
daemonJsonParsed = JSON.parse(daemonJson);
if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
isUpdated = true;
daemonJsonParsed['live-restore'] = true;
}
if (!daemonJsonParsed?.features?.buildkit) {
isUpdated = true;
daemonJsonParsed.features = {
buildkit: true
};
}
} catch (error) {
isUpdated = true;
}
try {
if (isUpdated) {
await executeCommand({
shell: true,
command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}`
});
await executeCommand({
dockerId: id,
command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json`
});
await executeCommand({ command: `rm /tmp/${daemonJson}` });
await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
}
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } });
} catch (error) {
console.log(error)
throw new Error('Error while verifying remote docker engine');
}
}
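As a reading aid (not part of the diff): when either flag is missing on the remote host, the object assembled above and shipped to /etc/docker/daemon.json boils down to the following sketch, so the remote engine comes back from the restart with live-restore and BuildKit enabled.
// Sketch of the merged daemon.json written by verifyRemoteDockerEngineFn:
const mergedDaemonJson = {
'live-restore': true, // keep containers running while dockerd restarts
features: { buildkit: true } // enable BuildKit on the remote engine
};
// JSON.stringify(mergedDaemonJson, null, 2) is echoed to /tmp/daemon-<id>.json,
// scp'd to /etc/docker/daemon.json, then `systemctl restart docker` runs over SSH.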
export async function verifyRemoteDockerEngine(
request: FastifyRequest<OnlyId>,
reply: FastifyReply
) {
const { id } = request.params;
try {
await verifyRemoteDockerEngineFn(id);
return reply.code(201).send();
} catch ({ status, message }) {
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: false } });
return errorHandler({ status, message });
}
}
export async function getDestinationStatus(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params;
const destination = await prisma.destinationDocker.findUnique({ where: { id } });
const { found: isRunning } = await checkContainer({
dockerId: destination.id,
container: 'coolify-proxy',
remove: true
});
return {
isRunning
};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}


@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify';
import { assignSSHKey, checkDestination, deleteDestination, forceDeleteDestination, getDestination, getDestinationStatus, listDestinations, newDestination, restartProxy, saveDestinationSettings, startProxy, stopProxy, verifyRemoteDockerEngine } from './handlers';
import type { OnlyId } from '../../../../types';
import type { CheckDestination, ListDestinations, NewDestination, Proxy, SaveDestinationSettings } from './types';
@@ -14,6 +14,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/:id', async (request) => await getDestination(request));
fastify.post<NewDestination>('/:id', async (request, reply) => await newDestination(request, reply));
fastify.delete<OnlyId>('/:id', async (request) => await deleteDestination(request));
fastify.delete<OnlyId>('/:id/force', async (request) => await forceDeleteDestination(request));
fastify.get<OnlyId>('/:id/status', async (request) => await getDestinationStatus(request));
fastify.post<SaveDestinationSettings>('/:id/settings', async (request) => await saveDestinationSettings(request));


@@ -1,9 +1,9 @@
import axios from "axios";
import { compareVersions } from 'compare-versions';
import cuid from 'cuid';
import bcrypt from 'bcryptjs';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import {
asyncExecShell,
asyncSleep,
cleanupDockerStorage,
errorHandler,
@@ -12,25 +12,107 @@ import {
prisma,
uniqueName,
version,
} from "../../../lib/common";
import { supportedServiceTypesAndVersions } from "../../../lib/services/supportedVersions";
import { scheduler } from "../../../lib/scheduler";
import type { FastifyReply, FastifyRequest } from "fastify";
import type { Login, Update } from ".";
import type { GetCurrentUser } from "./types";
executeCommand
} from '../../../lib/common';
import { scheduler } from '../../../lib/scheduler';
import type { FastifyReply, FastifyRequest } from 'fastify';
import type { Login, Update } from '.';
import type { GetCurrentUser } from './types';
export async function hashPassword(password: string, saltRounds = 15): Promise<string> {
return bcrypt.hash(password, saltRounds);
}
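A minimal usage sketch of the reworked helper (values are illustrative; the compare step mirrors the bcrypt.compare call used further down in login):
// Hash with the default cost factor of 15 (same behaviour as before the refactor):
const hashed = await hashPassword('s3cret');
// The new optional parameter allows a cheaper hash, e.g. in tests:
const testHash = await hashPassword('s3cret', 4);
// Verification, as login() does later in this file:
const ok = await bcrypt.compare('s3cret', hashed); // true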
export async function backup(request: FastifyRequest) {
try {
const { backupData } = request.params;
let std = null;
const [id, backupType, type, zipped, storage] = backupData.split(':');
console.log(id, backupType, type, zipped, storage);
const database = await prisma.database.findUnique({ where: { id } });
if (database) {
// await executeDockerCmd({
// dockerId: database.destinationDockerId,
// command: `docker pull coollabsio/backup:latest`,
// })
std = await executeCommand({
dockerId: database.destinationDockerId,
command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
});
}
if (std.stdout) {
return std.stdout;
}
if (std.stderr) {
return std.stderr;
}
return 'nope';
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
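For orientation, the colon-separated backupData route parameter maps onto the destructuring at the top of backup(); only the field order comes from the code above, the concrete values below are made up:
// <databaseId>:<backupType>:<type>:<zipped>:<storage>
const backupData = ['clxyz0database1', 'database', 'postgresql', 'zipped', 'local'].join(':');
// backup() looks up the database by the first segment and runs the coollabsio/backup
// image on its destination docker with CONTAINERS_TO_BACKUP=<backupData>.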
export async function cleanupManually(request: FastifyRequest) {
try {
const { serverId } = request.body;
const destination = await prisma.destinationDocker.findUnique({
where: { id: serverId }
});
await cleanupDockerStorage(destination.id, true);
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
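cleanupManually is wired to POST /internal/cleanup in the base route file at the end of this diff; a hedged call sketch using Fastify's inject (the plugin prefix and the id are placeholders):
const res = await fastify.inject({
method: 'POST',
url: '/internal/cleanup', // plus whatever prefix the plugin is registered under
headers: { authorization: `Bearer ${token}` },
payload: { serverId: 'cl_destination_id' } // id of the destinationDocker to clean
});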
export async function refreshTags() {
try {
const { default: got } = await import('got');
try {
if (isDev) {
let tags = await fs.readFile('./devTags.json', 'utf8');
try {
if (await fs.stat('./testTags.json')) {
const testTags = await fs.readFile('./testTags.json', 'utf8');
if (testTags.length > 0) {
tags = JSON.parse(tags).concat(JSON.parse(testTags));
}
}
} catch (error) { }
await fs.writeFile('./tags.json', tags);
} else {
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text();
await fs.writeFile('/app/tags.json', tags);
}
} catch (error) {
console.log(error);
}
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function refreshTemplates() {
try {
const { default: got } = await import('got');
try {
if (isDev) {
let templates = await fs.readFile('./devTemplates.yaml', 'utf8');
try {
if (await fs.stat('./testTemplate.yaml')) {
templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8'));
}
} catch (error) { }
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)));
} else {
const response = await got
.get('https://get.coollabs.io/coolify/service-templates.yaml')
.text();
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)));
}
} catch (error) {
console.log(error);
}
return {};
} catch ({ status, message }) {
return errorHandler({ status, message });
@@ -38,24 +120,29 @@ export async function cleanupManually(request: FastifyRequest) {
}
export async function checkUpdate(request: FastifyRequest) {
try {
const { default: got } = await import('got');
const isStaging =
request.hostname === "staging.coolify.io" ||
request.hostname === "arm.coolify.io";
request.hostname === 'staging.coolify.io' || request.hostname === 'arm.coolify.io';
const currentVersion = version;
const { coolify } = await got
.get('https://get.coollabs.io/versions.json', {
searchParams: {
appId: process.env['COOLIFY_APP_ID'] || undefined,
version: currentVersion
}
})
.json();
const latestVersion = coolify.main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isStaging) {
return {
isUpdateAvailable: true,
latestVersion: "next",
latestVersion: 'next'
};
}
return {
isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1,
latestVersion,
latestVersion
};
} catch ({ status, message }) {
return errorHandler({ status, message });
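A note on the comparison above: compare-versions returns 1, 0 or -1, which is why the handler checks isUpdateAvailable === 1 rather than truthiness (sketch, not part of the diff):
import { compareVersions } from 'compare-versions';
compareVersions('3.12.1', '3.12.0'); //  1 -> latest is newer, update available
compareVersions('3.12.0', '3.12.0'); //  0 -> already up to date
compareVersions('3.11.9', '3.12.0'); // -1 -> running build is ahead of the published latest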
@@ -67,14 +154,22 @@ export async function update(request: FastifyRequest<Update>) {
try {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
let image = `ghcr.io/coollabsio/coolify:${latestVersion}`;
try {
await executeCommand({ command: `docker pull ${image}` });
} catch (error) {
image = `coollabsio/coolify:${latestVersion}`;
await executeCommand({ command: `docker pull ${image}` });
}
await executeCommand({ shell: true, command: `ls .env || env | grep "^COOLIFY" | sort > .env` });
await executeCommand({
command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db ${image} /bin/sh -c "env | grep "^COOLIFY" | sort > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
return {};
} else {
await asyncSleep(2000);
@@ -87,12 +182,12 @@ export async function update(request: FastifyRequest<Update>) {
export async function resetQueue(request: FastifyRequest<any>) {
try {
const teamId = request.user.teamId;
if (teamId === "0") {
if (teamId === '0') {
await prisma.build.updateMany({
where: { status: { in: ["queued", "running"] } },
data: { status: "canceled" },
where: { status: { in: ['queued', 'running'] } },
data: { status: 'canceled' }
});
scheduler.workers.get("deployApplication").postMessage("cancel");
scheduler.workers.get('deployApplication').postMessage('cancel');
}
} catch ({ status, message }) {
return errorHandler({ status, message });
@@ -101,9 +196,9 @@ export async function resetQueue(request: FastifyRequest<any>) {
export async function restartCoolify(request: FastifyRequest<any>) {
try {
const teamId = request.user.teamId;
if (teamId === "0") {
if (teamId === '0') {
if (!isDev) {
asyncExecShell(`docker restart coolify`);
await executeCommand({ command: `docker restart coolify` });
return {};
} else {
return {};
@@ -111,7 +206,7 @@ export async function restartCoolify(request: FastifyRequest<any>) {
}
throw {
status: 500,
message: "You are not authorized to restart Coolify.",
message: 'You are not authorized to restart Coolify.'
};
} catch ({ status, message }) {
return errorHandler({ status, message });
@@ -122,128 +217,152 @@ export async function showDashboard(request: FastifyRequest) {
try {
const userId = request.user.userId;
const teamId = request.user.teamId;
let applications = await prisma.application.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }
});
const databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }
});
const services = await prisma.service.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, teams: true }
});
const gitSources = await prisma.gitSource.findMany({
where: {
OR: [
{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
{ isSystemWide: true }
]
},
include: { teams: true }
});
const destinations = await prisma.destinationDocker.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { teams: true }
});
const settings = await listSettings();
let foundUnconfiguredApplication = false;
for (const application of applications) {
if (
((!application.buildPack || !application.branch) && !application.simpleDockerfile) ||
!application.destinationDockerId ||
(!application.settings?.isBot && !application?.fqdn && application.buildPack !== 'compose')
) {
foundUnconfiguredApplication = true;
}
}
let foundUnconfiguredService = false;
for (const service of services) {
if (!service.fqdn) {
foundUnconfiguredService = true;
}
}
let foundUnconfiguredDatabase = false;
for (const database of databases) {
if (!database.version) {
foundUnconfiguredDatabase = true;
}
}
return {
foundUnconfiguredApplication,
foundUnconfiguredDatabase,
foundUnconfiguredService,
applications,
databases,
services,
gitSources,
destinations,
settings
};
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
export async function login(request: FastifyRequest<Login>, reply: FastifyReply) {
if (request.user) {
return reply.redirect("/dashboard");
return reply.redirect('/dashboard');
} else {
const { email, password, isLogin } = request.body || {};
if (!email || !password) {
throw { status: 500, message: "Email and password are required." };
throw { status: 500, message: 'Email and password are required.' };
}
const users = await prisma.user.count();
const userFound = await prisma.user.findUnique({
where: { email },
include: { teams: true, permission: true },
rejectOnNotFound: false
});
if (!userFound && isLogin) {
throw { status: 500, message: 'User not found.' };
}
const { isRegistrationEnabled, id } = await prisma.setting.findFirst();
let uid = cuid();
let permission = "read";
let permission = 'read';
let isAdmin = false;
if (users === 0) {
await prisma.setting.update({
where: { id },
data: { isRegistrationEnabled: false },
data: { isRegistrationEnabled: false }
});
uid = "0";
uid = '0';
}
if (userFound) {
if (userFound.type === "email") {
if (userFound.password === "RESETME") {
if (userFound.type === 'email') {
if (userFound.password === 'RESETME') {
const hashedPassword = await hashPassword(password);
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
if (userFound.id === "0") {
if (userFound.id === '0') {
await prisma.user.update({
where: { email: userFound.email },
data: { password: "RESETME" },
data: { password: 'RESETME' }
});
} else {
await prisma.user.update({
where: { email: userFound.email },
data: { password: "RESETTIMEOUT" },
data: { password: 'RESETTIMEOUT' }
});
}
throw {
status: 500,
message:
"Password reset link has expired. Please request a new one.",
message: 'Password reset link has expired. Please request a new one.'
};
} else {
await prisma.user.update({
where: { email: userFound.email },
data: { password: hashedPassword }
});
return {
userId: userFound.id,
teamId: userFound.id,
permission: userFound.permission,
isAdmin: true
};
}
}
const passwordMatch = await bcrypt.compare(password, userFound.password);
if (!passwordMatch) {
throw {
status: 500,
message: 'Wrong password or email address.'
};
}
uid = userFound.id;
isAdmin = true;
}
} else {
permission = "owner";
permission = 'owner';
isAdmin = true;
if (!isRegistrationEnabled) {
throw {
status: 404,
message: "Registration disabled by administrator.",
message: 'Registration disabled by administrator.'
};
}
const hashedPassword = await hashPassword(password);
@@ -253,17 +372,17 @@ export async function login(
id: uid,
email,
password: hashedPassword,
type: "email",
type: 'email',
teams: {
create: {
id: uid,
name: uniqueName(),
destinationDocker: { connect: { network: "coolify" } },
},
destinationDocker: { connect: { network: 'coolify' } }
}
},
permission: { create: { teamId: uid, permission: "owner" } },
permission: { create: { teamId: uid, permission: 'owner' } }
},
include: { teams: true },
include: { teams: true }
});
} else {
await prisma.user.create({
@@ -271,16 +390,16 @@ export async function login(
id: uid,
email,
password: hashedPassword,
type: "email",
type: 'email',
teams: {
create: {
id: uid,
name: uniqueName(),
},
name: uniqueName()
}
},
permission: { create: { teamId: uid, permission: "owner" } },
permission: { create: { teamId: uid, permission: 'owner' } }
},
include: { teams: true },
include: { teams: true }
});
}
}
@@ -288,23 +407,20 @@ export async function login(
userId: uid,
teamId: uid,
permission,
isAdmin
};
}
}
export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fastify) {
let token = null;
const { teamId } = request.query;
try {
const user = await prisma.user.findUnique({
where: { id: request.user.userId }
});
if (!user) {
throw 'User not found';
}
} catch (error) {
throw { status: 401, message: error };
@@ -313,17 +429,15 @@ export async function getCurrentUser(
try {
const user = await prisma.user.findFirst({
where: { id: request.user.userId, teams: { some: { id: teamId } } },
include: { teams: true, permission: true }
});
if (user) {
const permission = user.permission.find((p) => p.teamId === teamId).permission;
const payload = {
...request.user,
teamId,
permission: permission || null,
isAdmin: permission === "owner" || permission === "admin",
isAdmin: permission === 'owner' || permission === 'admin'
};
token = fastify.jwt.sign(payload);
}
@@ -331,10 +445,13 @@ export async function getCurrentUser(
// No new token -> not switching teams
}
}
const pendingInvitations = await prisma.teamInvitation.findMany({
where: { uid: request.user.userId }
});
return {
settings: await prisma.setting.findUnique({ where: { id: '0' } }),
pendingInvitations,
token,
...request.user
};
}
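For reference, the token re-signed when switching teams carries only the existing claims plus the new team context (shape taken from the payload object above; values are illustrative):
// Claims signed by fastify.jwt.sign(payload) on a team switch:
const payload = {
userId: 'cl_user_id',
teamId: 'cl_team_id', // the team the user is switching to
permission: 'admin', // this user's permission on that team, or null
isAdmin: true // true when permission is 'owner' or 'admin'
};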


@@ -5,9 +5,10 @@ import { decrypt, errorHandler, prisma, uniqueName } from '../../../../lib/commo
import { day } from '../../../../lib/dayjs';
import type { OnlyId } from '../../../../types';
import type { BodyId, DeleteUserFromTeam, InviteToTeam, SaveTeam, SetPermission } from './types';
export async function listAccounts(request: FastifyRequest) {
try {
const userId = request.user.userId;
const teamId = request.user.teamId;
@@ -15,10 +16,24 @@ export async function listTeams(request: FastifyRequest) {
where: { id: userId },
select: { id: true, email: true, teams: true }
});
let accounts = await prisma.user.findMany({ where: { teams: { some: { id: teamId } } }, select: { id: true, email: true, teams: true } });
if (teamId === '0') {
accounts = await prisma.user.findMany({ select: { id: true, email: true, teams: true } });
}
return {
account,
accounts
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function listTeams(request: FastifyRequest) {
try {
const userId = request.user.userId;
const teamId = request.user.teamId;
let allTeams = [];
if (teamId === '0') {
allTeams = await prisma.team.findMany({
where: { users: { none: { id: userId } } },
include: { permissions: true }
@@ -28,18 +43,30 @@ export async function listTeams(request: FastifyRequest) {
where: { users: { some: { id: userId } } },
include: { permissions: true }
});
const invitations = await prisma.teamInvitation.findMany({ where: { uid: userId } });
return {
ownTeams,
allTeams,
invitations,
account,
accounts
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function removeUserFromTeam(request: FastifyRequest<DeleteUserFromTeam>, reply: FastifyReply) {
try {
const { uid } = request.body;
const { id } = request.params;
const userId = request.user.userId;
const foundUser = await prisma.team.findMany({ where: { id, users: { some: { id: userId } } } });
if (foundUser.length === 0) {
return errorHandler({ status: 404, message: 'Team not found' });
}
await prisma.team.update({ where: { id }, data: { users: { disconnect: { id: uid } } } });
await prisma.permission.deleteMany({ where: { teamId: id, userId: uid } })
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
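The handler above is registered as POST /team/:id/user/remove in the teams route file below; a hedged request sketch via fastify.inject (ids and the plugin prefix are placeholders):
const res = await fastify.inject({
method: 'POST',
url: '/team/cl_team_id/user/remove', // plus the prefix the teams plugin is mounted under
headers: { authorization: `Bearer ${token}` },
payload: { uid: 'cl_user_id' } // user to disconnect; their permission row is deleted too
});
// reply.code(201) on success; an error payload if the caller is not a member of the team.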
export async function deleteTeam(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const userId = request.user.userId;


@@ -1,19 +1,22 @@
import { FastifyPluginAsync } from 'fastify';
import { acceptInvitation, changePassword, deleteTeam, getTeam, inviteToTeam, listAccounts, listTeams, newTeam, removeUser, removeUserFromTeam, revokeInvitation, saveTeam, setPermission } from './handlers';
import type { OnlyId } from '../../../../types';
import type { BodyId, DeleteUserFromTeam, InviteToTeam, SaveTeam, SetPermission } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => {
return await request.jwtVerify()
})
fastify.get('/', async (request) => await listAccounts(request));
fastify.post('/new', async (request, reply) => await newTeam(request, reply));
fastify.get('/teams', async (request) => await listTeams(request));
fastify.get<OnlyId>('/team/:id', async (request, reply) => await getTeam(request, reply));
fastify.post<SaveTeam>('/team/:id', async (request, reply) => await saveTeam(request, reply));
fastify.delete<OnlyId>('/team/:id', async (request, reply) => await deleteTeam(request, reply));
fastify.post<DeleteUserFromTeam>('/team/:id/user/remove', async (request, reply) => await removeUserFromTeam(request, reply));
fastify.post<InviteToTeam>('/team/:id/invitation/invite', async (request, reply) => await inviteToTeam(request, reply))
fastify.post<BodyId>('/team/:id/invitation/accept', async (request) => await acceptInvitation(request));
@@ -23,7 +26,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.delete<BodyId>('/user/remove', async (request, reply) => await removeUser(request, reply));
fastify.post<BodyId>('/user/password', async (request, reply) => await changePassword(request, reply));
// fastify.delete('/user', async (request, reply) => await deleteUser(request, reply));
};


@@ -5,6 +5,14 @@ export interface SaveTeam extends OnlyId {
name: string
}
}
export interface DeleteUserFromTeam {
Body: {
uid: string
},
Params: {
id: string
}
}
export interface InviteToTeam {
Body: {
email: string,


@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify';
import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify, backup } from './handlers';
import { GetCurrentUser } from './types';
export interface Update {
@@ -23,9 +23,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
onRequest: [fastify.authenticate]
}, async (request) => await getCurrentUser(request, fastify));
fastify.get('/undead', async function () {
return { message: 'nope' };
});
@@ -47,13 +45,17 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
onRequest: [fastify.authenticate]
}, async (request) => await restartCoolify(request));
fastify.post('/internal/resetQueue', {
onRequest: [fastify.authenticate]
}, async (request) => await resetQueue(request));
fastify.post('/internal/cleanup', {
onRequest: [fastify.authenticate]
}, async (request) => await cleanupManually(request));
// fastify.get('/internal/backup/:backupData', {
// onRequest: [fastify.authenticate]
// }, async (request) => await backup(request));
};
export default root;
