Compare commits

...

302 Commits

Author SHA1 Message Date
Andras Bacsai
9c02af6b52 Update github-actions.yml 2022-05-06 15:41:42 +02:00
Andras Bacsai
6a3f4ba171 Merge pull request #413 from coollabsio/next
v2.7.0 again
2022-05-06 15:40:30 +02:00
Andras Bacsai
6a6426fe6b fix: Sentry 2022-05-06 15:40:07 +02:00
Andras Bacsai
21256746c3 Update github-actions.yml 2022-05-06 15:23:18 +02:00
Andras Bacsai
c34d643f95 Update github-actions.yml 2022-05-06 15:22:22 +02:00
Andras Bacsai
0be402af82 Update github-actions.yml 2022-05-06 15:18:38 +02:00
Andras Bacsai
b5b0b6524d Update github-actions.yml 2022-05-06 15:10:28 +02:00
Andras Bacsai
22f1a3c908 Merge pull request #412 from coollabsio/next
v2.7.0
2022-05-06 14:55:45 +02:00
Andras Bacsai
fa5f439858 fix: Cancel 2022-05-06 14:45:50 +02:00
Andras Bacsai
7cc760eecf fix: Disable sentry for now 2022-05-06 14:43:28 +02:00
Andras Bacsai
af0652f6b2 fix: DNS check 2022-05-06 14:07:43 +02:00
Andras Bacsai
9e009bebaa fix 2022-05-06 11:53:53 +02:00
Andras Bacsai
8e53ae3484 fix: Check DNS in prod only 2022-05-06 11:42:24 +02:00
Andras Bacsai
7ceb8f1537 fix: Better DNS check to prevent errors 2022-05-06 11:41:39 +02:00
Andras Bacsai
b0eae8cfe9 fix: Cancel old builds in database 2022-05-06 09:55:39 +02:00
Andras Bacsai
febef372b8 fix: Cancel jobs 2022-05-06 08:42:06 +02:00
Andras Bacsai
a18e3659aa TODO for myself 2022-05-05 15:28:32 +02:00
Andras Bacsai
e2e342851a fix: Remove debug info 2022-05-05 15:25:22 +02:00
Andras Bacsai
bee3292088 fixes 2022-05-05 15:23:34 +02:00
Andras Bacsai
f56d4dbbb3 fix: Check domain for coolify before saving 2022-05-05 15:18:13 +02:00
Andras Bacsai
eccd7c96d7 fix: Do not run SSL renew in development 2022-05-05 13:31:36 +02:00
Andras Bacsai
4046c472ed chore: version++ 2022-05-05 13:29:46 +02:00
Andras Bacsai
0da4a1024a Add feedback link 2022-05-05 13:22:42 +02:00
Andras Bacsai
aa2f328640 fix: logos for dbs 2022-05-05 13:20:59 +02:00
Andras Bacsai
4d22b610b6 Merge pull request #398 from vasani-arpit/main
Structured issue making using new github issues forms.
2022-05-05 13:10:10 +02:00
Andras Bacsai
e91c3eab9c Merge pull request #396 from Cyril-Beeckman/main
[WIP] Added MariaDB database
2022-05-05 13:06:58 +02:00
Andras Bacsai
2e8fd6f0c7 fix: exposedPorts 2022-05-04 15:45:44 +02:00
Andras Bacsai
90fde24b40 Merge pull request #318 from CharcoalStyles/exposePort
Added expose port for applications
2022-05-04 15:45:16 +02:00
Andras Bacsai
02a1f50776 Update README.md 2022-05-04 14:40:00 +02:00
Andras Bacsai
57b97a9204 Merge pull request #411 from coollabsio/gh-actions
GitHub Actions for release
2022-05-04 14:37:50 +02:00
Andras Bacsai
1ec03693d3 Update github-actions.yml 2022-05-04 14:31:03 +02:00
Andras Bacsai
4246d86694 Update github-actions.yml 2022-05-04 14:30:35 +02:00
Andras Bacsai
2cce1f8459 Update github-actions.yml 2022-05-04 14:11:46 +02:00
Andras Bacsai
3937cfec53 Update github-actions.yml 2022-05-04 14:08:24 +02:00
Andras Bacsai
259aeeb67a Update github-actions.yml 2022-05-04 14:06:06 +02:00
Andras Bacsai
9d53bc0926 Update github-actions.yml 2022-05-04 14:03:00 +02:00
Andras Bacsai
1211f3c9fd Update github-actions.yml 2022-05-04 13:59:59 +02:00
Andras Bacsai
c07d6aa702 Update github-actions.yml 2022-05-04 13:44:48 +02:00
Andras Bacsai
4f662dbf21 Update github-actions.yml 2022-05-04 13:36:46 +02:00
Andras Bacsai
a4301c5d23 Update github-actions.yml 2022-05-04 13:32:35 +02:00
Andras Bacsai
86b7824c78 Update github-actions.yml 2022-05-04 13:22:54 +02:00
Andras Bacsai
435f063c36 Update and rename github-actions-demo.yml to github-actions.yml 2022-05-04 13:20:46 +02:00
Andras Bacsai
902a764ff2 Update github-actions-demo.yml 2022-05-04 13:06:15 +02:00
Andras Bacsai
4097378847 Update github-actions-demo.yml 2022-05-04 13:05:01 +02:00
Andras Bacsai
5f3567e808 Create github-actions-demo.yml 2022-05-04 13:02:16 +02:00
Andras Bacsai
7325353ced Merge pull request #406 from coollabsio/next
v2.6.3
2022-05-03 22:50:53 +02:00
Andras Bacsai
68f5b32876 fix: missing node versions 2022-05-03 22:49:52 +02:00
Andras Bacsai
8d4eaad920 Merge pull request #402 from coollabsio/next
v2.6.2
2022-05-03 12:11:39 +02:00
Andras Bacsai
4b38865cc9 fix: Webhook build images 2022-05-03 12:07:37 +02:00
Andras Bacsai
030cb124e5 Merge pull request #400 from coollabsio/next
v2.6.1
2022-05-03 11:41:51 +02:00
Andras Bacsai
fd363ec017 update readme.md 2022-05-03 11:40:09 +02:00
Andras Bacsai
8b813fb07a fix: Renew certificates 2022-05-03 11:40:02 +02:00
Aaron Styles
326f0dac1b Merge github.com:coollabsio/coolify into exposePort 2022-05-03 16:14:58 +10:00
Arpit Vasani
828faaf2b1 Update --bug-report.yaml 2022-05-03 11:38:49 +05:30
Arpit Vasani
9582664406 Create --task.yaml 2022-05-03 11:37:32 +05:30
Arpit Vasani
ec5474b72b Create --feature-request.yaml 2022-05-03 11:36:13 +05:30
Arpit Vasani
62d1011d9f Create --bug-report.yaml 2022-05-03 11:33:19 +05:30
Arpit Vasani
0a7ec6bd20 Create config.yml 2022-05-03 11:29:10 +05:30
Cyril Beeckman
b84c37cd8f Update README and remove duplicate for NextJS 2022-05-02 20:27:39 +02:00
Cyril Beeckman
887d65e512 Change MariaDB logo 2022-05-02 17:06:25 +02:00
Cyril Beeckman
3543a9c809 Added MariaDB database 2022-05-02 16:25:24 +02:00
Andras Bacsai
40da3ff9fe fix: Update autoupdate env variable 2022-05-02 15:50:40 +02:00
Andras Bacsai
2315192f4b Merge pull request #380 from coollabsio/next
v2.6.0
2022-05-02 14:59:40 +02:00
Andras Bacsai
0faa1540f4 ui fixes 2022-05-02 14:42:19 +02:00
Andras Bacsai
00cab67e73 feat: Cancel builds! 2022-05-02 14:15:50 +02:00
Andras Bacsai
b92bc9eebb fix: build image 2022-05-02 13:00:13 +02:00
Andras Bacsai
1905db16e8 beta features 2022-05-02 09:43:38 +02:00
Andras Bacsai
3e9cf7285b fix locale 2022-05-02 09:21:01 +02:00
Andras Bacsai
6fdbc572fe update locale 2022-05-02 09:13:56 +02:00
Aaron Styles
f94e17134e Added expose port for Services 2022-04-30 22:47:00 +10:00
Andras Bacsai
3fd50ebb12 fix: checking low disk space 2022-04-30 13:54:19 +02:00
Aaron Styles
40cbee0d75 Removed some checking that doesn't work properly. Added a switch for exposing a port. 2022-04-30 21:34:00 +10:00
Andras Bacsai
0eb7f4526e feat: DNS check settings for SSL generation 2022-04-30 13:21:18 +02:00
Andras Bacsai
646d92757a fix: Fider envs 2022-04-29 23:51:40 +02:00
Andras Bacsai
51efa01b11 fix: migration 2022-04-29 23:41:31 +02:00
Andras Bacsai
dc4a63ef92 fix: UI 2022-04-29 23:36:10 +02:00
Andras Bacsai
1b717ac091 Explainer 2022-04-29 23:33:04 +02:00
Andras Bacsai
e93d97f2bc Revert "fix: Always use IP address for webhooks"
This reverts commit 880865f1f2.
2022-04-29 23:25:15 +02:00
Andras Bacsai
45c904e876 fix: remove unnecessary test endpoint 2022-04-29 23:25:02 +02:00
Andras Bacsai
880865f1f2 fix: Always use IP address for webhooks 2022-04-29 23:02:58 +02:00
Andras Bacsai
8e42203b89 feat: Database and services logs 2022-04-29 22:25:37 +02:00
Andras Bacsai
2bd91fa970 migration for fider 2022-04-29 22:25:27 +02:00
Andras Bacsai
a3fd95020d feat: Fider service 2022-04-29 22:25:04 +02:00
Andras Bacsai
e5b1ce4eef feat: Laravel 2022-04-29 22:24:14 +02:00
Andras Bacsai
531973baab feat: Laravel buildpack is working! 2022-04-29 11:26:31 +02:00
Andras Bacsai
b6e6a1ccf1 WIP laravel 2022-04-28 16:40:32 +02:00
Andras Bacsai
1140afe2c9 feat: gzip compression 2022-04-28 16:40:23 +02:00
Andras Bacsai
f8f17832de WIP Laravel 2022-04-28 16:31:46 +02:00
Andras Bacsai
caaf030517 WIP: Laravel 2022-04-28 15:10:45 +02:00
Andras Bacsai
106aee31bd fix: Team switching moved to IAM menu 2022-04-28 14:12:19 +02:00
Aaron Styles
c98ed5338a Merged upstream and fixed expose port implementation 2022-04-28 21:49:13 +10:00
Andras Bacsai
48fa4ff245 feat: Hasura as a service 2022-04-27 15:37:50 +02:00
Andras Bacsai
d75d2880e5 fix: Unami svg size 2022-04-27 15:19:07 +02:00
Andras Bacsai
ec907b0ce4 Merge branch 'next' of github.com:coollabsio/coolify into next 2022-04-27 14:56:30 +02:00
Andras Bacsai
2cda0b22c2 chore: version++ 2022-04-27 14:56:24 +02:00
Andras Bacsai
a0076db42e Merge pull request #378 from KayhanB/main
Added new services to readme
2022-04-27 14:07:10 +02:00
Burak
a37cf49c2a Merge pull request #1 from KayhanB/update-readme
update readme for newly added services
2022-04-26 22:46:40 +03:00
Burak
c4833c3cc2 update readme for newly added services 2022-04-26 22:45:59 +03:00
Andras Bacsai
d03fbd9224 feat: select base image for buildpacks 2022-04-26 14:51:08 +02:00
Andras Bacsai
5998212b82 WIP: Base image selector 2022-04-25 23:44:06 +02:00
Andras Bacsai
62ccab22d6 fix: Packagemanager finder 2022-04-25 23:08:08 +02:00
Andras Bacsai
5ccea1cfcc Merge pull request #370 from coollabsio/next
v2.5.2
2022-04-25 17:49:07 +02:00
Andras Bacsai
8ccb1bd34c show autoupdate in localhost 2022-04-25 17:48:25 +02:00
Andras Bacsai
c1a48dcf1e feat: Autoupdater 2022-04-25 15:51:43 +02:00
Andras Bacsai
11d74c0c1f feat: Coolify auto-updater 2022-04-25 09:54:28 +02:00
Andras Bacsai
8290ee856f migration for umami 2022-04-25 09:11:49 +02:00
Andras Bacsai
08332c8321 fix: Contribution guide 2022-04-25 08:55:04 +02:00
Andras Bacsai
046f738b7d feat: Umami service 2022-04-25 08:54:53 +02:00
Andras Bacsai
07708155ac WIP: Umami service 2022-04-25 00:00:06 +02:00
Andras Bacsai
df5e23c7c2 fix: Contribution guide 2022-04-24 00:27:27 +02:00
Andras Bacsai
41adc02801 fix: Contribution 2022-04-24 00:25:38 +02:00
Andras Bacsai
72b650b086 fix: Simplify list services 2022-04-24 00:24:08 +02:00
Andras Bacsai
06fe3f33c0 fix: Contribution guide 2022-04-24 00:23:35 +02:00
Andras Bacsai
cbabf7fc51 chore: version++ 2022-04-23 18:46:00 +02:00
Andras Bacsai
6aeafda604 fix: Reactivate posgtres password 2022-04-23 16:12:16 +02:00
Andras Bacsai
30d656698e Merge pull request #369 from coollabsio/next
v2.5.1
2022-04-23 13:16:35 +02:00
Andras Bacsai
94d1af01df Merge pull request #365 from coollabsio/restray-restray_i18n
v2.5.1
2022-04-23 13:15:13 +02:00
Andras Bacsai
af97d399b6 fix: Code cleanups 2022-04-22 13:57:28 +02:00
Andras Bacsai
2f90fd1fe6 fix: No logs found 2022-04-22 11:44:04 +02:00
Andras Bacsai
c05a140b0b fix: GitHub token cleanup on team switch 2022-04-22 11:43:55 +02:00
Andras Bacsai
cbfb9a3844 chore: version++ 2022-04-22 11:20:15 +02:00
Andras Bacsai
5a227f70c6 fix: Do not activate i18n for now 2022-04-22 11:19:56 +02:00
Andras Bacsai
44a102443d fix: Application logs is not reversed and queried better 2022-04-21 23:07:54 +02:00
Andras Bacsai
cf7fdf198d fix: locales 2022-04-21 14:57:52 +02:00
Andras Bacsai
68f2f4f978 fix: Vscode permission fix 2022-04-21 11:25:51 +02:00
Andras Bacsai
029b623f08 fix: i18n 2022-04-21 10:05:27 +02:00
Andras Bacsai
fe3702847a Merge branch 'restray_i18n' of https://github.com/restray/coolify into restray-restray_i18n 2022-04-21 09:51:29 +02:00
Andras Bacsai
e9b852a30e Merge pull request #361 from coollabsio/next
v2.5.0
2022-04-20 23:14:54 +02:00
Andras Bacsai
1d4e5df5a2 fix contribution guide 2022-04-20 23:14:19 +02:00
Andras Bacsai
5e14b72fe4 Merge pull request #362 from coollabsio/contribution
Extended contribution guide
2022-04-20 23:08:50 +02:00
Andras Bacsai
8ebff72cde fix: Correct branch shown in build logs 2022-04-20 23:02:19 +02:00
Andras Bacsai
e16643c48c feat: Query container state periodically 2022-04-20 22:49:24 +02:00
Andras Bacsai
65c8f55ee6 fix: Text on deno buildpack 2022-04-20 22:27:10 +02:00
Andras Bacsai
fbc81ab3eb feat/fix: Show exited containers on UI & better UX 2022-04-20 22:24:41 +02:00
Andras Bacsai
a4d56fd79a feat: Deno DB migration 2022-04-20 22:24:00 +02:00
Andras Bacsai
ce45cb8aca package updates 2022-04-20 22:23:35 +02:00
Andras Bacsai
7f8428cd17 fix: Deno configurations 2022-04-20 22:23:25 +02:00
Andras Bacsai
14d79031c1 Merge pull request #360 from lichtscheu/buildpack-deno
Buildpack Deno
2022-04-20 19:06:27 +02:00
Andras Bacsai
b8aa7b6d08 Internal changes 2022-04-20 15:15:18 +02:00
Andras Bacsai
397ca7f20e Merge pull request #357 from coollabsio/next
v2.4.11
2022-04-20 14:19:49 +02:00
Andras Bacsai
e10b76a46b feat: Fluentbit investigation 2022-04-20 13:33:04 +02:00
Andras Bacsai
b46566280d fix: Application logs 2022-04-20 09:23:06 +02:00
Andras Bacsai
3ab6a231eb feat: Testing fluentd logging driver 2022-04-20 00:20:37 +02:00
lichtscheu
2bc2ae9b6e Merge remote-tracking branch 'upstream/next' into buildpack-deno
# Conflicts:
#	src/routes/applications/[id]/index.svelte
2022-04-19 23:17:03 +02:00
Andras Bacsai
2b28f8bd8f feat: Multiply dockerfile locations for docker buildpack 2022-04-19 22:34:28 +02:00
lichtscheu
dcdac29135 Merge remote-tracking branch 'upstream/main' into buildpack-deno 2022-04-19 22:09:21 +02:00
lichtscheu
591ee29e0d feat: initial deno support 2022-04-19 22:08:42 +02:00
Andras Bacsai
625e71ab08 fix: white-labeled custom logo 2022-04-19 18:23:04 +02:00
Andras Bacsai
b0af54587b feat: Add persistent storage for services 2022-04-18 23:49:08 +02:00
Andras Bacsai
be3080df08 fix: Pull new images for services all the time it's started. 2022-04-18 22:51:55 +02:00
Andras Bacsai
04685c9f9d fix: Scroll to top for logs 2022-04-18 22:46:39 +02:00
Andras Bacsai
1a83f2635f fix: Switch to stream on applications logs 2022-04-18 00:44:08 +02:00
Andras Bacsai
630aa45c87 fix: Application logs paginated 2022-04-18 00:42:08 +02:00
Andras Bacsai
0c3a381d1f fix: Buildlog line number is not string 2022-04-17 23:32:27 +02:00
Andras Bacsai
ffac7c5c87 chore:version++ 2022-04-17 21:08:19 +02:00
Andras Bacsai
410800e81c fix: use arm based certbot on arm 2022-04-17 21:07:59 +02:00
Andras Bacsai
9481beb61f Merge pull request #355 from coollabsio/next
v2.4.10
2022-04-17 20:37:56 +02:00
Andras Bacsai
141f2481a7 fix: Change user's id in sftp wp instance 2022-04-17 20:22:42 +02:00
Andras Bacsai
ea18f25adc ui: show extraconfig if wp is running 2022-04-17 20:22:21 +02:00
Andras Bacsai
9018184747 fix: Stop sFTP connection on wp stop 2022-04-17 20:22:07 +02:00
Andras Bacsai
4fc2dd55f5 chore: version++ 2022-04-17 19:17:20 +02:00
Andras Bacsai
5ef9a282eb fix: Wordpress extra config 2022-04-17 19:17:12 +02:00
Andras Bacsai
93a6518974 docs: update 2022-04-16 23:04:29 +02:00
Andras Bacsai
07aa285b27 updates on docs 2022-04-16 22:31:30 +02:00
Andras Bacsai
bf01e9e29f Grammar things 2022-04-16 22:28:17 +02:00
Andras Bacsai
d70672ba4b switch example type 2022-04-16 22:21:42 +02:00
Andras Bacsai
5eeb519ed6 docs: update 2022-04-16 22:19:41 +02:00
Andras Bacsai
5f047e4adf docs: How to add new services 2022-04-16 22:16:47 +02:00
Andras Bacsai
56b9a376bd fix: use redis-alpine 2022-04-14 23:48:52 +02:00
Andras Bacsai
0a1d31a188 Merge pull request #349 from coollabsio/v2.4.9
fix: Switch from bitnami/redis to normal redis
2022-04-14 23:42:13 +02:00
Andras Bacsai
64c9fb9a1b fix: Switch from bitnami/redis to normal redis 2022-04-14 23:40:23 +02:00
Andras Bacsai
47aad15cd5 Merge pull request #347 from coollabsio/v2.4.9
v2.4.9
2022-04-14 23:29:15 +02:00
Andras Bacsai
260a47a366 fix: Id of service container 2022-04-14 23:11:24 +02:00
Andras Bacsai
fd4bbe17f0 fix: Restart local docker coolify proxy in case of something happens to it 2022-04-14 21:43:22 +02:00
Andras Bacsai
25ff637703 fix: Remove proxy container in case of dependent container is down 2022-04-14 21:43:05 +02:00
Andras Bacsai
f571453696 fix: Better performance for cleanup images 2022-04-14 18:45:42 +02:00
Andras Bacsai
5cd7533972 fix: Loading of new destinations 2022-04-14 18:34:43 +02:00
Andras Bacsai
3a252509d0 fix: Add HTTP proxy checks 2022-04-14 15:04:18 +02:00
Andras Bacsai
2bd3802a6f fix: Improved tcp proxy monitoring for databases/ftp 2022-04-14 00:04:46 +02:00
Andras Bacsai
ce2757f514 fix: Teams view 2022-04-13 21:06:22 +02:00
Andras Bacsai
8419cdf604 fix: Postgres root pw is pw field 2022-04-13 19:59:30 +02:00
Andras Bacsai
907c2414ae chore:version++ 2022-04-13 19:52:56 +02:00
Andras Bacsai
f82207564f Merge pull request #344 from coollabsio/v2.4.8
v2.4.8
2022-04-13 19:19:04 +02:00
Andras Bacsai
991a09838c chore: version++ 2022-04-13 16:08:40 +02:00
Andras Bacsai
25df4bfd85 fix: Remove system wide pw reset 2022-04-13 16:05:26 +02:00
Andras Bacsai
d2f89d001b fix: GitLab typo 2022-04-13 16:05:08 +02:00
Andras Bacsai
1971f227fd fix: Register should happen if coolify proxy cannot be started 2022-04-13 14:23:42 +02:00
Andras Bacsai
c1adffe260 Merge pull request #343 from coollabsio/v2.4.7
v2.4.7
2022-04-13 13:12:35 +02:00
Andras Bacsai
e725887a55 chore:version++ 2022-04-13 13:12:23 +02:00
Andras Bacsai
5bf79b75b0 fix: Destinations to HAProxy 2022-04-13 13:10:04 +02:00
Andras Bacsai
6926975e40 Merge pull request #341 from coollabsio/v2.4.6
v2.4.6
2022-04-13 08:40:10 +02:00
Andras Bacsai
978a01c968 fix: Reverting postgres password for now 2022-04-13 08:35:20 +02:00
Andras Bacsai
f421f5ee84 fix: No permission on first registration 2022-04-12 23:57:08 +02:00
Andras Bacsai
383831c7b8 fix: Restart policy for resources 2022-04-12 23:12:09 +02:00
Andras Bacsai
41329facf7 fix: Try catch me 2022-04-12 22:49:48 +02:00
Andras Bacsai
7d3c644148 fix: DNS check before creating SSL cert 2022-04-12 22:18:54 +02:00
Andras Bacsai
7fab9b5930 fix: ProjectID for Github 2022-04-12 22:18:43 +02:00
Andras Bacsai
58763ef84c fix: Load all branches, not just the first 30 2022-04-12 21:48:50 +02:00
Andras Bacsai
0e6abf172b fix: Meilisearch service 2022-04-12 21:09:38 +02:00
Andras Bacsai
9e681ece41 chore: version++ 2022-04-12 20:58:02 +02:00
Andras Bacsai
28f87a306d fix: Cleanup images older than a day 2022-04-12 20:57:49 +02:00
Andras Bacsai
23e8833208 Merge pull request #339 from coollabsio/v2.4.5
v2.4.5
2022-04-12 19:08:46 +02:00
Andras Bacsai
03962663c2 fix: Timeout values 2022-04-12 18:21:10 +02:00
Andras Bacsai
cc2ec55c4d chore: version++ 2022-04-12 16:50:13 +02:00
Andras Bacsai
ff2c38aa16 fix: Invitations 2022-04-12 16:49:59 +02:00
Andras Bacsai
b5a9a2cea8 fix: Types 2022-04-12 16:49:52 +02:00
Andras Bacsai
cd3f661f7e Merge pull request #336 from coollabsio/v2.4.4
v2.4.4
2022-04-12 11:02:35 +02:00
Andras Bacsai
41bf6b5b86 fixes 2022-04-12 10:47:53 +02:00
Andras Bacsai
a4e7c85184 Add only amd release 2022-04-12 10:14:18 +02:00
Andras Bacsai
19aca9ab35 chore: version++ 2022-04-12 10:13:19 +02:00
Andras Bacsai
08704c289a fix: Proxy 2022-04-12 10:12:46 +02:00
Andras Bacsai
2224c22c6e fix: haproxy build stuffs 2022-04-12 09:22:27 +02:00
Andras Bacsai
b281889acd Merge branch 'main' of github.com:coollabsio/coolify into main 2022-04-12 09:20:12 +02:00
Andras Bacsai
cfc50a27b0 Package.json update 2022-04-12 09:19:48 +02:00
Andras Bacsai
ed5f21da6a Merge pull request #335 from coollabsio/arm
v2.4.3 - ARM!
2022-04-12 09:10:57 +02:00
Andras Bacsai
78f3eb81dd Merge pull request #314 from Mobilpadde/fix-coloured-tooltips
Tooltip with corresponding colours
2022-04-12 07:57:09 +02:00
Andras Bacsai
6a833934ce Merge pull request #293 from dominicbachmann/improve-typing
Started to introduce more typing
2022-04-11 22:40:47 +02:00
Andras Bacsai
45bf6f77d1 Merge branch 'arm' into improve-typing 2022-04-11 22:39:45 +02:00
Andras Bacsai
a1b3b7b687 Merge branch 'arm' of github.com:coollabsio/coolify into arm 2022-04-11 22:31:32 +02:00
Andras Bacsai
7ebcad6abb fix: Update dockerfile 2022-04-11 22:31:27 +02:00
Andras Bacsai
fed6d2bf07 Merge pull request #301 from esdete2/main
Rearrange ARGs in Docker build pack
2022-04-11 22:31:16 +02:00
Andras Bacsai
bea4943e9f chore: update build packages 2022-04-11 20:43:19 +02:00
Andras Bacsai
1979e431b8 chore: update build scripts 2022-04-11 20:40:06 +02:00
Andras Bacsai
9bead1d6b4 chore: Version++ 2022-04-11 20:36:46 +02:00
Andras Bacsai
56c4295e16 chore: Update packages 2022-04-11 20:36:15 +02:00
Andras Bacsai
7c7b5a61e5 fix: Remove unnecessary save button haha 2022-04-11 20:36:03 +02:00
Andras Bacsai
abaa13fda8 Merge branch 'main' into arm 2022-04-11 20:29:29 +02:00
esdete
042bfeddbb Merge branch 'main' into main 2022-04-11 17:47:50 +02:00
Mads Bram Cordes
f45ab067ce Add fuchsia for IAM 2022-04-11 16:58:00 +02:00
Mads Bram Cordes
97a6f04aaa Merge branch 'main' into fix-coloured-tooltips 2022-04-11 16:55:37 +02:00
Aaron Charcoal Styles
27f1e1d7cd Merge branch 'main' into exposePort 2022-04-11 09:49:07 +00:00
esdete
c3f4245164 Merge branch 'main' into main 2022-04-09 15:44:13 +02:00
Aaron Charcoal Styles
8f3f9ebade Merge branch 'main' into exposePort 2022-04-08 18:48:16 +00:00
esdete
157e5fd7aa Merge branch 'main' into main 2022-04-08 20:07:43 +02:00
Aaron Styles
1bd33fea98 Added expose port for applications 2022-04-08 17:12:01 +10:00
Mads Bram Cordes
039953588e Add tooltip colours to correspond with colour of Icon 2022-04-08 00:11:30 +02:00
dominicbachmann
9da08d600b Merged v2.4.0 2022-04-07 01:03:13 +02:00
dominicbachmann
be41c0dd02 Added types for store 2022-04-06 21:51:19 +02:00
dominicbachmann
a17b7a564e Added types for form 2022-04-06 21:49:43 +02:00
dominicbachmann
de37ee9f1c Added types for crypto 2022-04-06 21:10:37 +02:00
dominicbachmann
8212868b92 Added types for api 2022-04-06 21:09:15 +02:00
dominicbachmann
b44d8578d9 Added types for queues/sslrenewal 2022-04-06 21:05:36 +02:00
dominicbachmann
0358cf2de2 Added types for queues/ssl 2022-04-06 21:05:12 +02:00
dominicbachmann
94da008a47 Added types for queues/proxy 2022-04-06 21:04:51 +02:00
dominicbachmann
456b1b8074 Added types for queues/logger 2022-04-06 21:04:14 +02:00
dominicbachmann
78e6a7d1d3 Improved code quality of queues/index 2022-04-06 21:03:20 +02:00
dominicbachmann
76dc7ffb68 Added types for queues/cleanup 2022-04-06 21:01:47 +02:00
dominicbachmann
211aff7170 Added types for letsencrypt/index 2022-04-06 20:52:46 +02:00
dominicbachmann
bcacefb841 Added types for importers/gitlab 2022-04-06 20:50:57 +02:00
dominicbachmann
4505ad37d8 Added types for importers/github 2022-04-06 20:50:04 +02:00
dominicbachmann
18cf57f33c Added types for haproxy/index 2022-04-06 20:47:22 +02:00
dominicbachmann
8a401f50cb Added types for haproxy/configuration 2022-04-06 20:40:25 +02:00
dominicbachmann
51a5b3b602 Added types to database/users 2022-04-06 20:36:51 +02:00
dominicbachmann
68f9bca054 Added types to database/teams 2022-04-06 20:34:22 +02:00
dominicbachmann
e9e92c6e9e Added types to databse/settings 2022-04-06 20:31:51 +02:00
dominicbachmann
008cfdba09 Added types to database/services 2022-04-06 20:30:29 +02:00
dominicbachmann
9973197fa5 Added types for database/secrets 2022-04-06 20:23:27 +02:00
dominicbachmann
ec3b94cf96 added types for database/logs 2022-04-06 20:16:21 +02:00
dominicbachmann
c4cb92c78d Added types for database/gitSource 2022-04-06 20:15:15 +02:00
dominicbachmann
c390f82246 Added types to database/gitlab 2022-04-06 20:01:35 +02:00
dominicbachmann
b4f98e24a1 Added types to database/github 2022-04-06 19:56:47 +02:00
dominicbachmann
e042c5cfde Added types for database/databases 2022-04-06 19:45:47 +02:00
dominicbachmann
faeae8fd6c Added typings for database/destinations 2022-04-06 19:34:17 +02:00
Philip Schmidt
fd652bfce6 write args at the beginning of dockerfile and inherit them for each stage 2022-04-06 18:33:02 +02:00
dominicbachmann
82f7633c3a Improved typing and quality of database/checks and database/common code 2022-04-05 21:15:02 +02:00
dominicbachmann
9fdac2741a Improved typing and quality of applications.ts 2022-04-05 20:48:33 +02:00
dominicbachmann
8fb5260809 Resolved merge conflicts 2022-04-05 20:17:53 +02:00
dominicbachmann
e08ec12d26 Introduced typing for the buildJob and cleaned up common.ts 2022-04-05 20:11:19 +02:00
Restray
c39cb42601 feat (i18n) : go back i18n loading json files 2022-04-04 17:56:32 +02:00
Restray
0ead17ab70 Patch translation module not loaded 2022-04-04 17:06:03 +02:00
Restray
4a6062522e Merge branch 'main' into restray_i18n 2022-04-04 16:54:51 +02:00
Restray
bd15d85732 Add last translations for 2.3.0 2022-04-04 12:37:24 +02:00
Restray
b4bbd22781 Merge branch 'restray_i18n' of github.com:restray/coolify into restray_i18n 2022-04-04 12:31:48 +02:00
Restray
d4c972584a Add english translation for register page 2022-04-04 12:30:22 +02:00
Restray
edef4bd4a0 Merge branch 'main' into restray_i18n 2022-04-04 12:29:20 +02:00
Restray
448611039c Patch langs problems 2022-04-04 11:50:54 +02:00
Restray
e4f701b148 Add auto detect of locales files and contrib guide 2022-04-03 21:47:58 +02:00
Restray
8cd561b8cc Update french translations 2022-04-03 19:58:10 +02:00
Restray
a284928352 Patch flags and add french translation 2022-04-03 14:34:48 +02:00
Restray
fe787538e3 Finish routes translations 2022-04-03 14:14:59 +02:00
Restray
360fb5ea37 Add services i18n 2022-04-03 00:18:48 +02:00
Restray
13891110ce Add reset i18n 2022-04-02 23:57:37 +02:00
Restray
c1c25d59c8 Add "new" i18n 2022-04-02 23:53:10 +02:00
Restray
a53bda1436 Add destinations i18n 2022-04-02 23:34:30 +02:00
Restray
7a0d151467 Add database translation 2022-04-02 23:17:59 +02:00
Restray
a788b7bc13 Add translation for applications components 2022-04-02 23:00:03 +02:00
Restray
8f58b14629 Add application i18n 2022-04-02 22:04:50 +02:00
Restray
269250ef3d Begin translation and finish i18n system 2022-04-02 21:08:55 +02:00
Restray
a3241516cb Change the way to load i18n (go throw cookie) 2022-04-02 20:25:24 +02:00
Restray
943300509b Revert "Add Locale URL"
This reverts commit d910b21185.
2022-04-02 19:29:22 +02:00
Restray
92d1f5aa55 Revert "Patch bugs on locale redirections"
This reverts commit 614eb923d8.
2022-04-02 19:28:20 +02:00
Restray
614eb923d8 Patch bugs on locale redirections 2022-04-02 17:33:50 +02:00
Restray
d910b21185 Add Locale URL 2022-04-02 16:15:00 +02:00
Andras Bacsai
1b43976ff0 Update proxy build commands 2022-04-02 13:39:24 +02:00
Andras Bacsai
321fb019eb Update dockerfiles for arm 2022-04-01 23:02:23 +02:00
Andras Bacsai
f6858a68e0 Update schema 2022-04-01 22:51:08 +02:00
Restray
741db1778b feat: install svelte-18n and init setup 2022-04-01 22:50:55 +02:00
Andras Bacsai
fe17e2eaba Prisma Engine build script 2022-04-01 17:57:37 +02:00
Andras Bacsai
22ef0b5d29 Update packages 2022-04-01 17:46:08 +02:00
Andras Bacsai
823279fb60 Updates 2022-04-01 17:16:11 +02:00
Andras Bacsai
f56361c0ca updates for ARM 2022-04-01 14:25:55 +02:00
Andras Bacsai
4946ca2d91 Dockerfile for multiarch builds 2022-04-01 00:08:29 +02:00
255 changed files with 9078 additions and 2741 deletions


@@ -2,5 +2,7 @@ COOLIFY_APP_ID=
COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db
COOLIFY_SENTRY_DSN=
COOLIFY_IS_ON="docker"
COOLIFY_WHITE_LABELED="false"
COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=false


@@ -0,0 +1,47 @@
name: 🐞 Bug report
description: Create a bug report to help us improve coolify
title: "[Bug]: "
labels: [Bug]
assignees:
  - andrasbacsai
  - vasani-arpit
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report! Please fill the form in English
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Description
      description: A concise description of what you're experiencing and what you expect.
      placeholder: |
        When I do <X>, <Y> happens and I see the error message attached below:
        ```...```
        What I expect is <Z>
    validations:
      required: true
  - type: textarea
    attributes:
      label: Steps To Reproduce
      description: Add steps to reproduce this behaviour, include console / network logs & videos
      placeholder: |
        1. Go to '...'
        2. Click on '....'
        3. Scroll down to '....'
        4. See error
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: "The version of your coolify Instance"
      placeholder: "2.5.2"
    validations:
      required: true


@@ -0,0 +1,31 @@
name: 🛠️ Feature request
description: Suggest an idea to improve coolify
title: '[Feature]: '
labels: [Enhancement]
assignees:
  - andrasbacsai
  - vasani-arpit
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to request a feature for coolify! Please also add your request here to get feedback from the community: https://feedback.coolify.io/!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue related to this feature request already exists.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: Summary
      description: One paragraph description of the feature.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Why should this be worked on?
      description: A concise description of the problems or use cases for this feature request.
    validations:
      required: true

.github/ISSUE_TEMPLATE/--task.yaml (vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
name: 📝 Task
description: Create a task for the team to work on
title: "[Task]: "
labels: [Task]
body:
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue related to this already exists.
      options:
        - label: I have searched the existing issues
          required: true
  - type: textarea
    attributes:
      label: SubTasks
      placeholder: |
        - Sub Task 1
        - Sub Task 2
    validations:
      required: false

.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 5 lines)

@@ -0,0 +1,5 @@
blank_issues_enabled: true
contact_links:
  - name: 🤔 Questions and Help
    url: https://discord.com/invite/6rDM4fkymF
    about: Reach out to us on discord or our github discussions page.

.github/workflows/github-actions.yml (vendored, new file, 39 lines)

@@ -0,0 +1,39 @@
name: release-coolify
on:
  release:
    types: published
jobs:
  make-it-coolifyed:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Get current package version
        uses: martinbeentjes/npm-get-version-action@v1.2.3
        id: package-version
      - name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          platforms: linux/amd64,linux/arm64
          push: true
          tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
          cache-from: type=registry,ref=coollabsio/coolify:buildcache
          cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max

.vscode/settings.json (vendored, new file, 11 lines)

@@ -0,0 +1,11 @@
{
"i18n-ally.localesPaths": ["src/lib/locales"],
"i18n-ally.keystyle": "nested",
"i18n-ally.extract.ignoredByFiles": {
"src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
},
"i18n-ally.sourceLanguage": "en",
"i18n-ally.enabledFrameworks": ["svelte"],
"i18n-ally.enabledParsers": ["js", "ts", "json"],
"i18n-ally.extract.autoDetect": true
}


@@ -1,14 +1,23 @@
# Welcome
# 👋 Welcome
First of all, thank you for considering to contribute to my project! It means a lot 💜.
First of all, thank you for considering contributing to my project! It means a lot 💜.
# Technical skills required
## 🙋 Want to help?
- Node.js / Javascript
- Svelte / SvelteKit
- Prisma.io
If you are new to contributing on GitHub, you can read the [first contribution](https://github.com/firstcontributions/first-contributions) guide and follow it.
# Recommended Pull Request Guideline
Follow the [introduction](#introduction) to get set up, then start contributing!
Here is a short list of ways you can help the project:
- [🧑‍💻 Develop your own ideas](#developer-contribution)
- [🌐 Translate the project](#translation)
## 👋 Introduction
🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or macOS.
#### Recommended Pull Request Guideline
- Fork the project
- Clone your fork repo to local
@@ -16,15 +25,17 @@ First of all, thank you for considering to contribute to my project! It means a
- Push to your fork repo
- Create a pull request: https://github.com/coollabsio/compare
- Write a proper description
- Open the pull request to review
- Open the pull request to review against `next` branch
---
# How to start after you set up your local fork?
This repository best with [pnpm](https://pnpm.io) due to the lock file. I recommend you should try and use `pnpm` as well, because it is cool and efficient!
Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
## Setup development environment
#### Setup a local development environment
- Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- Install dependencies with `pnpm install`.
@@ -33,12 +44,234 @@ You need to have [Docker Engine](https://docs.docker.com/engine/install/) instal
- Seed the database with base entities with `pnpm db:seed`
- You can start coding after starting `pnpm dev`.
## Database migrations
#### How to start after you set up your local fork?
This repository works better with [pnpm](https://pnpm.io) due to the lock file. I recommend you give it a try and use `pnpm` as well because it is cool and efficient!
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
## 🧑‍💻 Developer contribution
### Technical skills required
- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: Svelte / SvelteKit
- **Database ORM**: Prisma.io
- **Docker Engine**
### Database migrations
During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.
If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>`, where `nameOfMigration` is a name you choose. Make it meaningful. :)
## Tricky parts
### Tricky parts
- BullMQ, the queue system Coolify is using, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking of a different queue/scheduler library. I'm open for discussion!
- BullMQ, the queue system Coolify uses, cannot be hot reloaded. So if you change anything in the files related to it, you need to restart the development process. I'm actively looking for a different queue/scheduler library. I'm open to discussion!
---
# How to add new services
You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:
- Self-hostable (obviously)
- Open-source
- Maintained (I do not want to add software full of bugs)
## Backend
There are 5 steps you need to take on the backend side.
1. Create Prisma / database schema for the new service.
2. Add supported versions of the service.
3. Update global functions.
4. Create API endpoints.
5. Define automatically generated variables.
> I will use [Umami](https://umami.is/) as an example service.
### Create Prisma / database schema for the new service.
You only need to do this if you store passwords or any persistent configuration. Most services require it, but there are some exceptions, like NocoDB.
Update Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).
- Add a new model with the new service name.
- Make a relationship with the `Service` model.
- In the `Service` model, the name of the new field should start with a lowercase letter.
- If the service needs a database, define a `publicPort` field to be able to make its database public; an example field name in the case of PostgreSQL is `postgresqlPublicPort`. It should be an optional field.
If you are finished with the Prisma schema, you should update the database schema with the `pnpm db:push` command.
> You must restart the running development environment to be able to use the new model
> If you use VSCode, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running VSCode.
### Add supported versions
Supported versions are hardcoded into Coolify (for now).
You need to update the `supportedServiceTypesAndVersions` function in [src/lib/components/common.ts](src/lib/components/common.ts). Example JSON:
```js
{
  // Name used to identify the service internally
  name: 'umami',
  // Fancier name to show to the user
  fancyName: 'Umami',
  // Docker base image for the service
  baseImage: 'ghcr.io/mikecao/umami',
  // Optional: If there is any dependent image, you should list it here
  images: [],
  // Usable tags
  versions: ['postgresql-latest'],
  // Which tag is the recommended
  recommendedVersion: 'postgresql-latest',
  // Application's default port, Umami listens on 3000
  ports: {
    main: 3000
  }
}
```
### Update global functions
1. Add the new service to the `include` variable in [src/lib/database/services.ts](src/lib/database/services.ts), so it will be included in all places in the database queries where it is required.
```js
const include: Prisma.ServiceInclude = {
  destinationDocker: true,
  persistentStorage: true,
  serviceSecret: true,
  minio: true,
  plausibleAnalytics: true,
  vscodeserver: true,
  wordpress: true,
  ghost: true,
  meiliSearch: true,
  umami: true // This line!
};
```
2. Update the database update query with the new service type in the `configureServiceType` function in [src/lib/database/services.ts](src/lib/database/services.ts). This function defines the automatically generated variables (passwords, users, etc.) and their encryption process (if applicable).
```js
[...]
else if (type === 'umami') {
  const postgresqlUser = cuid();
  const postgresqlPassword = encrypt(generatePassword());
  const postgresqlDatabase = 'umami';
  const hashSalt = encrypt(generatePassword(64));
  await prisma.service.update({
    where: { id },
    data: {
      type,
      umami: {
        create: {
          postgresqlDatabase,
          postgresqlPassword,
          postgresqlUser,
          hashSalt,
        }
      }
    }
  });
}
```
3. Add decryption process for configurations and passwords to `getService` function in [src/lib/database/services.ts](src/lib/database/services.ts)
```js
if (body.umami?.postgresqlPassword)
  body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);
if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
```
4. Add the service deletion query to the `removeService` function in [src/lib/database/services.ts](src/lib/database/services.ts), as in the sketch below.
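A minimal sketch of that deletion step, assuming the `Umami` example above and that `removeService` receives the service `id` and uses the shared `prisma` client like the other helpers in that file (the exact shape of the real function may differ):

```js
// Hypothetical addition inside removeService() in src/lib/database/services.ts:
// remove the service-specific row before the generic Service record is deleted.
await prisma.umami.deleteMany({ where: { serviceId: id } });
```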
### Create API endpoints.
You need to add a new folder under [src/routes/services/[id]](src/routes/services/[id]) with the lowercase name of the service. You need 3 default files in that folder.
#### `index.json.ts`:
It has a POST endpoint that updates the service details in Coolify's database, such as the name, URL, and other configuration like passwords. It should look something like this:
```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';
export const post: RequestHandler = async (event) => {
  const { status, body } = await getUserDetails(event);
  if (status === 401) return { status, body };
  const { id } = event.params;
  let { name, fqdn } = await event.request.json();
  if (fqdn) fqdn = fqdn.toLowerCase();
  try {
    await db.updateService({ id, fqdn, name });
    return { status: 201 };
  } catch (error) {
    return ErrorHandler(error);
  }
};
```
If it's necessary, you can create your own database update function, specifically for the new service.
#### `start.json.ts`
It has a POST endpoint that sets up all the required secrets, persistent volumes and the `docker-compose.yaml` file, and sends a request to the specified Docker Engine.
You could also define an `HTTP` or `TCP` proxy for every other port that should be proxied to your server. (See `startHttpProxy` and `startTcpProxy` functions in [src/lib/haproxy/index.ts](src/lib/haproxy/index.ts))
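For orientation only, here is a heavily simplified sketch of what a `start.json.ts` handler could look like. The imports mirror the `index.json.ts` example above; `db.getService`, the compose layout and the final Docker call are assumptions sketched for illustration, not the actual implementation:

```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import yaml from 'js-yaml';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
  const { status, body } = await getUserDetails(event);
  if (status === 401) return { status, body };

  const { id } = event.params;
  try {
    // Assumption: a database helper returns the stored (decrypted) configuration.
    const service = await db.getService({ id });

    // Build a docker-compose definition from that configuration: image and tag,
    // generated secrets as environment variables, persistent volumes, network.
    const composeFile = {
      version: '3.8',
      services: {
        [id]: {
          image: `${service.type}:${service.version}`, // illustrative only
          environment: [], // secrets created in configureServiceType go here
          volumes: [], // entries from ServicePersistentStorage
          restart: 'always'
        }
      }
    };
    const composeYaml = yaml.dump(composeFile);
    // ...write composeYaml to a working directory and start it on the service's
    // destination Docker Engine (e.g. `docker compose up -d`); call
    // startHttpProxy/startTcpProxy here for any extra public ports...

    return { status: 200 };
  } catch (error) {
    return ErrorHandler(error);
  }
};
```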
#### `stop.json.ts`
It has a POST endpoint that stops the service and all dependent containers (TCP/HTTP proxies). If `publicPort` is specified, it also needs to be cleaned up from the database.
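And a matching sketch for `stop.json.ts`; the commented-out helpers (`removeContainer`, `stopTcpHttpProxy`, `db.setServicePublicPort`) are hypothetical names used only to illustrate the flow:

```js
import { getUserDetails } from '$lib/common';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
import type { RequestHandler } from '@sveltejs/kit';

export const post: RequestHandler = async (event) => {
  const { status, body } = await getUserDetails(event);
  if (status === 401) return { status, body };

  const { id } = event.params;
  try {
    const service = await db.getService({ id }); // assumed helper, as above

    // Stop and remove the service container plus any dependent TCP/HTTP proxy
    // containers (hypothetical helpers, shown only to illustrate the flow):
    // await removeContainer(id, service.destinationDocker);
    // await stopTcpHttpProxy(service);

    // If a public port was exposed, clear it in the database so it can be reused:
    // await db.setServicePublicPort({ id, publicPort: null }); // hypothetical
    return { status: 200 };
  } catch (error) {
    return ErrorHandler(error);
  }
};
```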
## Frontend
1. You need to add a custom logo at [src/lib/components/svg/services/](src/lib/components/svg/services/) as a svelte component.
SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
2. You need to include the logo at
- [src/routes/services/index.svelte](src/routes/services/index.svelte) with `isAbsolute` in two places,
- [src/lib/components/ServiceLinks.svelte](src/lib/components/ServiceLinks.svelte) with `isAbsolute` and a link to the docs/main site of the service
- [src/routes/services/[id]/configuration/type.svelte](src/routes/services/[id]/configuration/type.svelte) with `isAbsolute`.
3. By default, the URL and name frontend forms are included in [src/routes/services/[id]/\_Services/\_Services.svelte](src/routes/services/[id]/_Services/_Services.svelte).
If you need to show more details on the frontend, such as users/passwords, you need to add a Svelte component to [src/routes/services/[id]/\_Services](src/routes/services/[id]/_Services) prefixed with an underscore. For examples, see the other files in that folder.
You also need to add the new inputs to the `index.json.ts` file of the specific service, like for MinIO here: [src/routes/services/[id]/minio/index.json.ts](src/routes/services/[id]/minio/index.json.ts)
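As an illustration of that last point, the earlier `index.json.ts` sketch could be extended to accept a service-specific field. `umamiAdminPassword` comes from the Umami table above, but the dedicated update helper here is an assumption, not the real MinIO/Umami code:

```js
// Inside the post handler of src/routes/services/[id]/umami/index.json.ts (illustrative):
let { name, fqdn, umamiAdminPassword } = await event.request.json();
if (fqdn) fqdn = fqdn.toLowerCase();

try {
  // A service-specific update function can persist (and encrypt) the extra field,
  // mirroring what configureServiceType does on creation.
  await db.updateUmamiService({ id, fqdn, name, umamiAdminPassword }); // hypothetical helper
  return { status: 201 };
} catch (error) {
  return ErrorHandler(error);
}
```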
## 🌐 Translate the project
The project uses [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) for translations.
It follows [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) codes to name languages.
### Installation
You must have gone through all the [intro](#introduction) steps before you can start translating.
This is only advice, but I recommend you use:
- Visual Studio Code
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal for seeing the progress of the translation.
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get syntax highlighting for the project
### Adding a language
If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the steps below:
1. In `src/lib/locales/`, copy `en.json` and rename the copy to your language code (e.g. `cz.json`).
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO code of your language": "Language",` (e.g. `"cz": "Czech",`).
3. Have fun translating!


@@ -1,31 +1,42 @@
FROM node:16.14.0-alpine
RUN apk add --no-cache g++ cmake make python3
WORKDIR /app
COPY package*.json .
RUN yarn install
COPY . .
RUN yarn build
FROM node:16.14.0-alpine
FROM node:16.14.2-alpine as install
WORKDIR /app
LABEL coolify.managed true
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
RUN apk add --no-cache curl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm
RUN curl -fsSL "https://download.docker.com/linux/static/stable/x86_64/docker-20.10.9.tgz" | tar -xzvf - docker/docker -C . --strip-components 1 && mv docker /usr/bin/docker
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://github.com/docker/compose/releases/download/v2.2.2/docker-compose-linux-x86_64 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose
COPY package*.json .
RUN pnpm install
FROM node:16.14.2-alpine
ARG TARGETPLATFORM
WORKDIR /app
ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
PRISMA_CLIENT_ENGINE_TYPE=binary
COPY --from=coollabsio/prisma-engine:latest /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/
COPY --from=install /app/node_modules ./node_modules
COPY . .
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
RUN pnpm prisma generate
RUN pnpm build
COPY --from=0 /app/docker-compose.yaml .
COPY --from=0 /app/build .
COPY --from=0 /app/package.json .
COPY --from=0 /app/node_modules ./node_modules
COPY --from=0 /app/prisma ./prisma
EXPOSE 3000
CMD ["pnpm", "start"]


@@ -8,21 +8,30 @@ https://demo.coolify.io/
(If it is unresponsive, that means someone overloaded the server. 🙃)
## Feedback
If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!
## How to install
Installation is automated with the following command:
```bash
/bin/bash -c "$(curl -fsSL https://get.coollabs.io/coolify/install.sh)"
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh
```
If you would like no questions during installation
If you would like no questions during installation:
```bash
wget -q https://get.coollabs.io/coolify/install.sh -O install.sh; sudo bash ./install.sh -f
```
For more details, go to the [docs](https://docs.coollabs.io/coolify/installation).
## Features
### Git Sources
You can use the following Git Sources to be auto-deployed to your Coolifyt instance! (Self hosted versions also supported.)
You can use the following Git Sources to be auto-deployed to your Coolify instance! (Self-hosted versions are also supported.)
- Github
- GitLab
@@ -38,7 +47,7 @@ You can deploy your applications to the following destinations:
### Applications
These are the predefined build packs, but with the Docker build pack, you can host basically anything that is hostable with a single Dockerfile.
These are the predefined build packs, but with the Docker build pack, you can host anything that is hostable with a single Dockerfile.
- Static sites
- NodeJS
@@ -46,10 +55,10 @@ These are the predefined build packs, but with the Docker build pack, you can ho
- NuxtJS
- NextJS
- React/Preact
- NextJS
- Gatsby
- Svelte
- PHP
- Laravel
- Rust
- Docker
@@ -58,6 +67,7 @@ These are the predefined build packs, but with the Docker build pack, you can ho
One-click database is ready to be used internally or shared over the internet:
- MongoDB
- MariaDB
- MySQL
- PostgreSQL
- CouchDB
@@ -77,6 +87,10 @@ You can host cool open-source services as well:
- [LanguageTool](https://languagetool.org)
- [n8n](https://n8n.io)
- [Uptime Kuma](https://github.com/louislam/uptime-kuma)
- [MeiliSearch](https://github.com/meilisearch/meilisearch)
- [Umami](https://github.com/mikecao/umami)
- [Fider](https://fider.io)
- [Hasura](https://hasura.io)
## Migration from v1


@@ -0,0 +1,6 @@
FROM fluent/fluent-bit:1.9.0
COPY fluentbit-dev.conf /tmp/fluentbit.conf
ENTRYPOINT ["/fluent-bit/bin/fluent-bit", "-c", "/tmp/fluentbit.conf"]
# USER root
# RUN ["gem", "install", "fluent-plugin-mongo"]
# USER fluent


@@ -0,0 +1,24 @@
[INPUT]
    Name forward
    Listen 0.0.0.0
    Port 24224
    Buffer_Chunk_Size 32KB
    Buffer_Max_Size 64KB
[OUTPUT]
    Name influxdb
    Match *
    Host coolify-influxdb
    Port 8086
    Bucket containerlogs
    Org organization
    HTTP_Token supertoken
    Sequence_Tag _seq
    Tag_Keys container_name
[OUTPUT]
    Name http
    Match *
    Host host.docker.internal
    Port 3000
    URI /logs.json
    Format json


@@ -0,0 +1,28 @@
<source>
  @type forward
  port 24224
  bind 0.0.0.0
</source>
<match **>
  @type http
  endpoint http://host.docker.internal:3000/logs.json
  <buffer>
    flush_at_shutdown true
    flush_mode immediate
    flush_thread_count 8
    flush_thread_interval 1
    flush_thread_burst_interval 1
    retry_forever true
    retry_type exponential_backoff
  </buffer>
</match>
<filter docker.**>
  @type parser
  key_name log
  reserve_data true
  <parse>
    @type json
  </parse>
</filter>


@@ -4,10 +4,10 @@ global
defaults
mode http
log global
timeout http-request 60s
timeout connect 10s
timeout client 60s
timeout server 60s
timeout http-request 120s
timeout connect 20s
timeout client 120s
timeout server 120s
frontend "${APP}"
mode http


@@ -5,10 +5,10 @@ global
defaults
mode http
log global
timeout http-request 60s
timeout connect 10s
timeout client 60s
timeout server 60s
timeout http-request 120s
timeout connect 20s
timeout client 120s
timeout server 120s
userlist haproxy-dataplaneapi
user admin insecure-password "${HAPROXY_PASSWORD}"


@@ -0,0 +1 @@
docker build --platform linux/amd64,linux/arm64 -t coollabsio/prisma-engine -f prisma-engine.Dockerfile --push .


@@ -0,0 +1,10 @@
FROM rust:1.58.1-alpine3.14 as prisma
WORKDIR /prisma
ENV RUSTFLAGS="-C target-feature=-crt-static"
RUN apk --no-cache add openssl direnv git musl-dev openssl-dev build-base perl protoc
RUN git clone --depth=1 --branch=3.12.x https://github.com/prisma/prisma-engines.git /prisma
RUN cargo build --release
FROM alpine
WORKDIR /prisma-engines
COPY --from=prisma /prisma/target/release/query-engine /prisma/target/release/migration-engine /prisma/target/release/introspection-engine /prisma/target/release/prisma-fmt /prisma-engines/


@@ -2,10 +2,8 @@ version: '3.8'
services:
redis:
image: 'bitnami/redis:6.2'
image: redis:6.2-alpine
container_name: coolify-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
networks:
- coolify-infra
ports:
@@ -13,7 +11,24 @@ services:
published: 6379
protocol: tcp
mode: host
# fluentbit:
# container_name: coolify-fluentbit
# build:
# context: ./data/fluentd
# dockerfile: Dockerfile-dev
# ports:
# - target: 24224
# published: 24224
# protocol: tcp
# mode: host
# - target: 24224
# published: 24224
# protocol: udp
# mode: host
# networks:
# - coolify-infra
# extra_hosts:
# - 'host.docker.internal:host-gateway'
networks:
coolify-infra:
attachable: true


@@ -21,11 +21,9 @@ services:
- coolify-infra
depends_on: ['redis']
redis:
image: bitnami/redis:6.2
image: redis:6.2-alpine
restart: always
container_name: coolify-redis
environment:
- ALLOW_EMPTY_PASSWORD=yes
networks:
- coolify-infra


@@ -1,14 +1,14 @@
{
"name": "coolify",
"description": "An open-source & self-hostable Heroku / Netlify alternative.",
"version": "2.4.2",
"version": "2.7.0",
"license": "AGPL-3.0",
"scripts": {
"dev": "docker-compose -f docker-compose-dev.yaml up -d && cross-env NODE_ENV=development & svelte-kit dev",
"dev": "docker-compose -f docker-compose-dev.yaml up -d && cross-env NODE_ENV=development & svelte-kit dev --host 0.0.0.0",
"dev:stop": "docker-compose -f docker-compose-dev.yaml down",
"dev:logs": "docker-compose -f docker-compose-dev.yaml logs -f --tail 10",
"studio": "npx prisma studio",
"start": "npx prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js",
"start": "npx prisma migrate deploy && npx prisma generate && npx prisma db seed && node build/index.js",
"build": "svelte-kit build",
"preview": "svelte-kit preview",
"check": "svelte-check --tsconfig ./tsconfig.json",
@@ -17,21 +17,23 @@
"db:push": "prisma db push && prisma generate",
"db:seed": "prisma db seed",
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
"release:staging": "cross-var docker build -t coollabsio/coolify:$npm_package_version . && docker push coollabsio/coolify:$npm_package_version",
"release:pre": "cross-var docker build -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest .",
"release:coolify": "cross-var yarn release:pre && docker push coollabsio/coolify:$npm_package_version && docker push coollabsio/coolify:latest",
"release:haproxy": "docker build -f haproxy.Dockerfile -t coollabsio/coolify-haproxy-alpine:1.0.0 -t coollabsio/coolify-haproxy-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-alpine",
"release:haproxy:tcp": "docker build -f haproxy-tcp.Dockerfile -t coollabsio/coolify-haproxy-tcp-alpine:1.0.0 -t coollabsio/coolify-haproxy-tcp-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-tcp-alpine",
"release:haproxy:http": "docker build -f haproxy-http.Dockerfile -t coollabsio/coolify-haproxy-http-alpine:1.0.0 -t coollabsio/coolify-haproxy-http-alpine:latest . && docker image push --all-tags coollabsio/coolify-haproxy-http-alpine",
"release:production:all": "cross-var docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
"release:production:amd": "cross-var docker build --platform linux/amd64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
"release:production:arm": "cross-var docker build --platform linux/arm64 -t coollabsio/coolify:$npm_package_version -t coollabsio/coolify:latest --push .",
"release:staging:all": "cross-var docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify:$npm_package_version --push .",
"release:staging:amd": "cross-var docker build --platform linux/amd64 -t coollabsio/coolify:$npm_package_version --push .",
"release:staging:arm": "cross-var docker build --platform linux/arm64 -t coollabsio/coolify:$npm_package_version --push .",
"release:haproxy": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-alpine:latest -t coollabsio/coolify-haproxy-alpine:1.1.0 -f data/haproxy.Dockerfile --push .",
"release:haproxy:tcp": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-tcp-alpine:latest -t coollabsio/coolify-haproxy-tcp-alpine:1.1.0 -f data/haproxy-tcp.Dockerfile --push .",
"release:haproxy:http": "docker build --platform linux/amd64,linux/arm64 -t coollabsio/coolify-haproxy-http-alpine:latest -t coollabsio/coolify-haproxy-http-alpine:1.1.0 -f data/haproxy-http.Dockerfile --push .",
"prepare": "husky install"
},
"devDependencies": {
"@sveltejs/adapter-node": "1.0.0-next.73",
"@sveltejs/kit": "1.0.0-next.303",
"@types/bcrypt": "5.0.0",
"@sveltejs/kit": "1.0.0-next.316",
"@types/js-cookie": "3.0.1",
"@types/js-yaml": "4.0.5",
"@types/node": "17.0.23",
"@types/node": "17.0.25",
"@types/node-forge": "1.0.1",
"@typescript-eslint/eslint-plugin": "4.31.1",
"@typescript-eslint/parser": "4.31.1",
@@ -43,17 +45,18 @@
"eslint-config-prettier": "8.5.0",
"eslint-plugin-svelte3": "3.4.1",
"husky": "7.0.4",
"lint-staged": "12.3.7",
"lint-staged": "12.4.0",
"postcss": "8.4.12",
"prettier": "2.6.1",
"prettier-plugin-svelte": "2.6.0",
"prettier-plugin-tailwindcss": "0.1.8",
"prettier": "2.6.2",
"prettier-plugin-svelte": "2.7.0",
"prettier-plugin-tailwindcss": "0.1.10",
"prisma": "3.11.1",
"svelte": "3.46.4",
"svelte-check": "2.4.6",
"svelte-preprocess": "4.10.4",
"svelte": "3.47.0",
"svelte-check": "2.7.0",
"svelte-preprocess": "4.10.6",
"svelte-select": "4.4.7",
"tailwindcss": "3.0.23",
"sveltekit-i18n": "2.1.2",
"tailwindcss": "3.0.24",
"ts-node": "10.7.0",
"tslib": "2.3.1",
"typescript": "4.6.3"
@@ -62,26 +65,26 @@
"dependencies": {
"@iarna/toml": "2.2.5",
"@prisma/client": "3.11.1",
"@sentry/node": "6.19.2",
"bcrypt": "5.0.1",
"bullmq": "1.78.1",
"@sentry/node": "6.19.6",
"bcryptjs": "2.4.3",
"bullmq": "1.80.4",
"compare-versions": "4.1.3",
"cookie": "0.4.2",
"cooltipz-css": "2.1.0",
"cookie": "0.5.0",
"cuid": "2.1.8",
"dayjs": "1.11.0",
"dayjs": "1.11.1",
"dockerode": "3.3.1",
"dotenv-extended": "2.9.0",
"generate-password": "1.7.0",
"get-port": "6.1.2",
"got": "12.0.2",
"got": "12.0.3",
"is-ip": "^4.0.0",
"js-cookie": "3.0.1",
"js-yaml": "4.1.0",
"jsonwebtoken": "8.5.1",
"mustache": "4.2.0",
"node-forge": "1.3.0",
"node-forge": "1.3.1",
"p-limit": "4.0.0",
"svelte-kit-cookie-session": "2.1.2",
"svelte-kit-cookie-session": "2.1.3",
"tailwindcss-scrollbar": "0.1.0",
"unique-names-generator": "4.7.1"
},

pnpm-lock.yaml (generated, 894 lines changed): file diff suppressed because it is too large.


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "exposePort" INTEGER;


@@ -0,0 +1,12 @@
-- CreateTable
CREATE TABLE "ServicePersistentStorage" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"path" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ServicePersistentStorage_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "ServicePersistentStorage_serviceId_path_key" ON "ServicePersistentStorage"("serviceId", "path");


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "dockerFileLocation" TEXT;


@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "denoMainFile" TEXT;
ALTER TABLE "Application" ADD COLUMN "denoOptions" TEXT;


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Build" ADD COLUMN "branch" TEXT;


@@ -0,0 +1,17 @@
-- CreateTable
CREATE TABLE "Umami" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"umamiAdminPassword" TEXT NOT NULL,
"hashSalt" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Umami_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "Umami_serviceId_key" ON "Umami"("serviceId");

View File

@@ -0,0 +1,22 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"proxyPassword" TEXT NOT NULL,
"proxyUser" TEXT NOT NULL,
"proxyHash" TEXT,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "baseBuildImage" TEXT;
ALTER TABLE "Application" ADD COLUMN "baseImage" TEXT;

View File

@@ -0,0 +1,16 @@
-- CreateTable
CREATE TABLE "Hasura" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"graphQLAdminPassword" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Hasura_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "Hasura_serviceId_key" ON "Hasura"("serviceId");

View File

@@ -0,0 +1,25 @@
-- CreateTable
CREATE TABLE "Fider" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"jwtSecret" TEXT NOT NULL,
"emailNoreply" TEXT,
"emailMailgunApiKey" TEXT,
"emailMailgunDomain" TEXT,
"emailMailgunRegion" TEXT,
"emailSmtpHost" TEXT,
"emailSmtpPort" INTEGER,
"emailSmtpUser" TEXT,
"emailSmtpPassword" TEXT,
"emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");

View File

@@ -0,0 +1,29 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Fider" (
"id" TEXT NOT NULL PRIMARY KEY,
"serviceId" TEXT NOT NULL,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"jwtSecret" TEXT NOT NULL,
"emailNoreply" TEXT,
"emailMailgunApiKey" TEXT,
"emailMailgunDomain" TEXT,
"emailMailgunRegion" TEXT NOT NULL DEFAULT 'EU',
"emailSmtpHost" TEXT,
"emailSmtpPort" INTEGER,
"emailSmtpUser" TEXT,
"emailSmtpPassword" TEXT,
"emailSmtpEnableStartTls" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Fider_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Fider" ("createdAt", "emailMailgunApiKey", "emailMailgunDomain", "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt") SELECT "createdAt", "emailMailgunApiKey", "emailMailgunDomain", coalesce("emailMailgunRegion", 'EU') AS "emailMailgunRegion", "emailNoreply", "emailSmtpEnableStartTls", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUser", "id", "jwtSecret", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "serviceId", "updatedAt" FROM "Fider";
DROP TABLE "Fider";
ALTER TABLE "new_Fider" RENAME TO "Fider";
CREATE UNIQUE INDEX "Fider_serviceId_key" ON "Fider"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,23 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"proxyPassword" TEXT NOT NULL,
"proxyUser" TEXT NOT NULL,
"proxyHash" TEXT,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Service" ADD COLUMN "exposePort" INTEGER;

View File

@@ -1,5 +1,6 @@
generator client {
provider = "prisma-client-js"
provider = "prisma-client-js"
binaryTargets = ["native", "linux-musl"]
}
datasource db {
@@ -17,6 +18,8 @@ model Setting {
proxyPassword String
proxyUser String
proxyHash String?
isAutoUpdateEnabled Boolean @default(false)
isDNSCheckEnabled Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
@@ -81,6 +84,7 @@ model Application {
buildPack String?
projectId Int?
port Int?
exposePort Int?
installCommand String?
buildCommand String?
startCommand String?
@@ -90,6 +94,9 @@ model Application {
pythonWSGI String?
pythonModule String?
pythonVariable String?
dockerFileLocation String?
denoMainFile String?
denoOptions String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
settings ApplicationSettings?
@@ -100,6 +107,8 @@ model Application {
gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
secrets Secret[]
persistentStorage ApplicationPersistentStorage[]
baseImage String?
baseBuildImage String?
}
model ApplicationSettings {
@@ -117,14 +126,25 @@ model ApplicationSettings {
model ApplicationPersistentStorage {
id String @id @default(cuid())
application Application @relation(fields: [applicationId], references: [id])
applicationId String
path String
applicationId String
path String
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([applicationId, path])
}
model ServicePersistentStorage {
id String @id @default(cuid())
service Service @relation(fields: [serviceId], references: [id])
serviceId String
path String
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@unique([serviceId, path])
}
model Secret {
id String @id @default(cuid())
name String
@@ -168,6 +188,7 @@ model Build {
githubAppId String?
gitlabAppId String?
commit String?
branch String?
status String? @default("queued")
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -266,17 +287,18 @@ model DatabaseSettings {
}
model Service {
id String @id @default(cuid())
id String @id @default(cuid())
name String
fqdn String?
dualCerts Boolean @default(false)
exposePort Int?
dualCerts Boolean @default(false)
type String?
version String?
teams Team[]
destinationDockerId String?
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
plausibleAnalytics PlausibleAnalytics?
minio Minio?
vscodeserver Vscodeserver?
@@ -284,6 +306,10 @@ model Service {
ghost Ghost?
serviceSecret ServiceSecret[]
meiliSearch MeiliSearch?
persistentStorage ServicePersistentStorage[]
umami Umami?
hasura Hasura?
fider Fider?
}
model PlausibleAnalytics {
@@ -368,3 +394,52 @@ model MeiliSearch {
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Umami {
id String @id @default(cuid())
serviceId String @unique
postgresqlUser String
postgresqlPassword String
postgresqlDatabase String
postgresqlPublicPort Int?
umamiAdminPassword String
hashSalt String
service Service @relation(fields: [serviceId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Hasura {
id String @id @default(cuid())
serviceId String @unique
postgresqlUser String
postgresqlPassword String
postgresqlDatabase String
postgresqlPublicPort Int?
graphQLAdminPassword String
service Service @relation(fields: [serviceId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Fider {
id String @id @default(cuid())
serviceId String @unique
postgresqlUser String
postgresqlPassword String
postgresqlDatabase String
postgresqlPublicPort Int?
jwtSecret String
emailNoreply String?
emailMailgunApiKey String?
emailMailgunDomain String?
emailMailgunRegion String @default("EU")
emailSmtpHost String?
emailSmtpPort Int?
emailSmtpUser String?
emailSmtpPassword String?
emailSmtpEnableStartTls Boolean @default(false)
service Service @relation(fields: [serviceId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}

View File

@@ -50,6 +50,20 @@ async function main() {
}
});
}
// Set auto-update based on env variable
const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
const settings = await prisma.setting.findFirst({});
if (settings) {
await prisma.setting.update({
where: {
id: settings.id
},
data: {
isAutoUpdateEnabled
}
});
}
}
main()
.catch((e) => {

src/app.d.ts (vendored, 7 changes)
View File

@@ -6,7 +6,11 @@ declare namespace App {
cookies: Record<string, string>;
}
interface Platform {}
interface Session extends SessionData {}
interface Session extends SessionData {
whiteLabelDetails: {
icon: string | null;
};
}
interface Stuff {
service: any;
application: any;
@@ -27,6 +31,7 @@ interface SessionData {
userId?: string | null;
teamId?: string | null;
permission?: string;
lang?: string;
isAdmin?: boolean;
expires?: string | null;
}

View File

@@ -6,8 +6,12 @@ import { getUserDetails, sentry } from '$lib/common';
import { version } from '$lib/common';
import cookie from 'cookie';
import { dev } from '$app/env';
import { locales } from '$lib/translations';
const whiteLabeled = process.env['COOLIFY_WHITE_LABELED'] === 'true';
const whiteLabelDetails = {
icon: (whiteLabeled && process.env['COOLIFY_WHITE_LABELED_ICON']) || null
};
export const handle = handleSession(
{
@@ -17,6 +21,24 @@ export const handle = handleSession(
},
async function ({ event, resolve }) {
let response;
const { url, request } = event;
// Get defined locales
const supportedLocales = locales.get();
let locale;
if (event.locals.cookies['lang']) {
locale = event.locals.cookies['lang'];
} else if (!locale) {
locale = `${`${request.headers.get('accept-language')}`.match(
/[a-zA-Z]+?(?=-|_|,|;)/
)}`.toLowerCase();
}
// Set default locale if user preferred locale does not match
if (!supportedLocales.includes(locale)) locale = 'en';
try {
if (event.locals.cookies) {
if (event.locals.cookies['kit.session']) {
@@ -36,12 +58,14 @@ export const handle = handleSession(
}
response = await resolve(event, {
ssr: !event.url.pathname.startsWith('/webhooks/success')
ssr: !event.url.pathname.startsWith('/webhooks/success'),
transformPage: ({ html }) => html.replace(/<html.*>/, `<html lang="${locale}">`)
});
} catch (error) {
console.log(error);
response = await resolve(event, {
ssr: !event.url.pathname.startsWith('/webhooks/success')
ssr: !event.url.pathname.startsWith('/webhooks/success'),
transformPage: ({ html }) => html.replace(/<html.*>/, `<html lang="${locale}">`)
});
response.headers.append(
'Set-Cookie',
@@ -64,20 +88,31 @@ export const handle = handleSession(
expires: new Date('Thu, 01 Jan 1970 00:00:01 GMT')
})
);
} finally {
return response;
}
response.headers.append(
'Set-Cookie',
cookie.serialize('lang', locale, {
path: '/',
sameSite: 'strict',
maxAge: 30 * 24 * 60 * 60
})
);
return response;
}
);
export const getSession: GetSession = function ({ locals }) {
return {
lang: locals.cookies.lang,
version,
whiteLabeled,
whiteLabelDetails,
...locals.session.data
};
};
export async function handleError({ error, event }) {
if (!dev) sentry.captureException(error, event);
// if (!dev) sentry.captureException(error, event);
}

View File

@@ -1,9 +1,15 @@
async function send({ method, path, data = {}, headers, timeout = 30000 }) {
async function send({
method,
path,
data = {},
headers,
timeout = 120000
}): Promise<Record<string, unknown>> {
const controller = new AbortController();
const id = setTimeout(() => controller.abort(), timeout);
const opts = { method, headers: {}, body: null, signal: controller.signal };
if (Object.keys(data).length > 0) {
let parsedData = data;
const parsedData = data;
for (const [key, value] of Object.entries(data)) {
if (value === '') {
parsedData[key] = null;
@@ -43,18 +49,33 @@ async function send({ method, path, data = {}, headers, timeout = 30000 }) {
return responseData;
}
export function get(path, headers = {}): Promise<any> {
export function get(
path: string,
headers?: Record<string, unknown>
): Promise<Record<string, unknown>> {
return send({ method: 'GET', path, headers });
}
export function del(path, data = {}, headers = {}): Promise<any> {
export function del(
path: string,
data: Record<string, unknown>,
headers?: Record<string, unknown>
): Promise<Record<string, unknown>> {
return send({ method: 'DELETE', path, data, headers });
}
export function post(path, data, headers = {}): Promise<any> {
export function post(
path: string,
data: Record<string, unknown>,
headers?: Record<string, unknown>
): Promise<Record<string, unknown>> {
return send({ method: 'POST', path, data, headers });
}
export function put(path, data, headers = {}): Promise<any> {
export function put(
path: string,
data: Record<string, unknown>,
headers?: Record<string, unknown>
): Promise<Record<string, unknown>> {
return send({ method: 'PUT', path, data, headers });
}

View File

@@ -5,6 +5,21 @@ import { scanningTemplates } from '$lib/components/templates';
import { promises as fs } from 'fs';
import { staticDeployments } from '$lib/components/common';
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
const nodeBased = [
'react',
'preact',
'vuejs',
'svelte',
'gatsby',
'astro',
'eleventy',
'node',
'nestjs',
'nuxtjs',
'nextjs'
];
export function makeLabelForStandaloneApplication({
applicationId,
fqdn,
@@ -91,7 +106,9 @@ export const setDefaultConfiguration = async (data) => {
startCommand,
buildCommand,
publishDirectory,
baseDirectory
baseDirectory,
dockerFileLocation,
denoMainFile
} = data;
const template = scanningTemplates[buildPack];
if (!port) {
@@ -102,14 +119,26 @@ export const setDefaultConfiguration = async (data) => {
else if (buildPack === 'php') port = 80;
else if (buildPack === 'python') port = 8000;
}
if (!installCommand) installCommand = template?.installCommand || 'yarn install';
if (!startCommand) startCommand = template?.startCommand || 'yarn start';
if (!buildCommand) buildCommand = template?.buildCommand || null;
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
installCommand = template?.installCommand || 'yarn install';
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
startCommand = template?.startCommand || 'yarn start';
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
buildCommand = template?.buildCommand || null;
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
}
if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
} else {
dockerFileLocation = '/Dockerfile';
}
if (!denoMainFile) {
denoMainFile = 'main.ts';
}
return {
buildPack,
@@ -118,11 +147,19 @@ export const setDefaultConfiguration = async (data) => {
startCommand,
buildCommand,
publishDirectory,
baseDirectory
baseDirectory,
dockerFileLocation,
denoMainFile
};
};
export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId) {
export async function copyBaseConfigurationFiles(
buildPack,
workdir,
buildId,
applicationId,
baseImage
) {
try {
if (buildPack === 'php') {
await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
@@ -131,7 +168,7 @@ export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, ap
buildId,
applicationId
});
} else if (staticDeployments.includes(buildPack)) {
} else if (staticDeployments.includes(buildPack) && baseImage.includes('nginx')) {
await fs.writeFile(
`${workdir}/nginx.conf`,
`user nginx;
@@ -159,7 +196,7 @@ export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, ap
include /etc/nginx/mime.types;
default_type application/octet-stream;
server {
listen 80;
server_name localhost;
@@ -184,7 +221,6 @@ export async function copyBaseConfigurationFiles(buildPack, workdir, buildId, ap
}
`
);
await saveBuildLog({ line: 'Copied default configuration file.', buildId, applicationId });
}
} catch (error) {
console.log(error);
@@ -199,3 +235,215 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
startCommand?.includes('pnpm')
);
}
export function setDefaultBaseImage(buildPack) {
const nodeVersions = [
{
value: 'node:lts',
label: 'node:lts'
},
{
value: 'node:18',
label: 'node:18'
},
{
value: 'node:17',
label: 'node:17'
},
{
value: 'node:16',
label: 'node:16'
},
{
value: 'node:14',
label: 'node:14'
},
{
value: 'node:12',
label: 'node:12'
}
];
const staticVersions = [
{
value: 'webdevops/nginx:alpine',
label: 'webdevops/nginx:alpine'
},
{
value: 'webdevops/apache:alpine',
label: 'webdevops/apache:alpine'
}
];
const rustVersions = [
{
value: 'rust:latest',
label: 'rust:latest'
},
{
value: 'rust:1.60',
label: 'rust:1.60'
},
{
value: 'rust:1.60-buster',
label: 'rust:1.60-buster'
},
{
value: 'rust:1.60-bullseye',
label: 'rust:1.60-bullseye'
},
{
value: 'rust:1.60-slim-buster',
label: 'rust:1.60-slim-buster'
},
{
value: 'rust:1.60-slim-bullseye',
label: 'rust:1.60-slim-bullseye'
},
{
value: 'rust:1.60-alpine3.14',
label: 'rust:1.60-alpine3.14'
},
{
value: 'rust:1.60-alpine3.15',
label: 'rust:1.60-alpine3.15'
}
];
const phpVersions = [
{
value: 'webdevops/php-apache:8.0',
label: 'webdevops/php-apache:8.0'
},
{
value: 'webdevops/php-nginx:8.0',
label: 'webdevops/php-nginx:8.0'
},
{
value: 'webdevops/php-apache:7.4',
label: 'webdevops/php-apache:7.4'
},
{
value: 'webdevops/php-nginx:7.4',
label: 'webdevops/php-nginx:7.4'
},
{
value: 'webdevops/php-apache:7.3',
label: 'webdevops/php-apache:7.3'
},
{
value: 'webdevops/php-nginx:7.3',
label: 'webdevops/php-nginx:7.3'
},
{
value: 'webdevops/php-apache:7.2',
label: 'webdevops/php-apache:7.2'
},
{
value: 'webdevops/php-nginx:7.2',
label: 'webdevops/php-nginx:7.2'
},
{
value: 'webdevops/php-apache:7.1',
label: 'webdevops/php-apache:7.1'
},
{
value: 'webdevops/php-nginx:7.1',
label: 'webdevops/php-nginx:7.1'
},
{
value: 'webdevops/php-apache:7.0',
label: 'webdevops/php-apache:7.0'
},
{
value: 'webdevops/php-nginx:7.0',
label: 'webdevops/php-nginx:7.0'
},
{
value: 'webdevops/php-apache:5.6',
label: 'webdevops/php-apache:5.6'
},
{
value: 'webdevops/php-nginx:5.6',
label: 'webdevops/php-nginx:5.6'
},
{
value: 'webdevops/php-apache:8.0-alpine',
label: 'webdevops/php-apache:8.0-alpine'
},
{
value: 'webdevops/php-nginx:8.0-alpine',
label: 'webdevops/php-nginx:8.0-alpine'
},
{
value: 'webdevops/php-apache:7.4-alpine',
label: 'webdevops/php-apache:7.4-alpine'
},
{
value: 'webdevops/php-nginx:7.4-alpine',
label: 'webdevops/php-nginx:7.4-alpine'
},
{
value: 'webdevops/php-apache:7.3-alpine',
label: 'webdevops/php-apache:7.3-alpine'
},
{
value: 'webdevops/php-nginx:7.3-alpine',
label: 'webdevops/php-nginx:7.3-alpine'
},
{
value: 'webdevops/php-apache:7.2-alpine',
label: 'webdevops/php-apache:7.2-alpine'
},
{
value: 'webdevops/php-nginx:7.2-alpine',
label: 'webdevops/php-nginx:7.2-alpine'
},
{
value: 'webdevops/php-apache:7.1-alpine',
label: 'webdevops/php-apache:7.1-alpine'
},
{
value: 'webdevops/php-nginx:7.1-alpine',
label: 'webdevops/php-nginx:7.1-alpine'
}
];
let payload = {
baseImage: null,
baseBuildImage: null,
baseImages: [],
baseBuildImages: []
};
if (nodeBased.includes(buildPack)) {
payload.baseImage = 'node:lts';
payload.baseImages = nodeVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
}
if (staticApps.includes(buildPack)) {
payload.baseImage = 'webdevops/nginx:alpine';
payload.baseImages = staticVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
}
if (buildPack === 'python') {
payload.baseImage = 'python:3-alpine';
}
if (buildPack === 'rust') {
payload.baseImage = 'rust:latest';
payload.baseBuildImage = 'rust:latest';
payload.baseImages = rustVersions;
payload.baseBuildImages = rustVersions;
}
if (buildPack === 'deno') {
payload.baseImage = 'denoland/deno:latest';
}
if (buildPack === 'php') {
payload.baseImage = 'webdevops/php-apache:8.0-alpine';
payload.baseImages = phpVersions;
}
if (buildPack === 'laravel') {
payload.baseImage = 'webdevops/php-apache:8.0-alpine';
payload.baseBuildImage = 'node:18';
payload.baseBuildImages = nodeVersions;
}
return payload;
}

View File

@@ -0,0 +1,62 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image): Promise<void> => {
const {
workdir,
port,
baseDirectory,
secrets,
pullmergeRequestId,
denoMainFile,
denoOptions,
buildId
} = data;
const Dockerfile: Array<string> = [];
let depsFound = false;
try {
await fs.readFile(`${workdir}${baseDirectory || ''}/deps.ts`);
depsFound = true;
} catch (error) {}
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
});
}
if (depsFound) {
Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
Dockerfile.push(`RUN deno cache deps.ts`);
}
Dockerfile.push(`COPY ${denoMainFile} /app`);
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`ENV NO_COLOR true`);
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;
}
}

View File

@@ -10,36 +10,42 @@ export default async function ({
buildId,
baseDirectory,
secrets,
pullmergeRequestId
pullmergeRequestId,
dockerFileLocation
}) {
try {
let file = `${workdir}/Dockerfile`;
const file = `${workdir}${dockerFileLocation}`;
let dockerFileOut = `${workdir}`;
if (baseDirectory) {
file = `${workdir}/${baseDirectory}/Dockerfile`;
workdir = `${workdir}/${baseDirectory}`;
dockerFileOut = `${workdir}${baseDirectory}`;
workdir = `${workdir}${baseDirectory}`;
}
const Dockerfile: Array<string> = (await fs.readFile(`${file}`, 'utf8'))
.toString()
.trim()
.split('\n');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
if (
(pullmergeRequestId && secret.isPRMRSecret) ||
(!pullmergeRequestId && !secret.isPRMRSecret)
) {
Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
}
});
}
}
});
}
await fs.writeFile(`${file}`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, debug });
await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, debug, dockerFileLocation });
} catch (error) {
throw error;
}

View File

@@ -2,25 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, imageforBuild): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageforBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
Dockerfile.push(`EXPOSE 80`);
if (baseImage.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const image = 'webdevops/nginx:alpine';
const imageForBuild = 'node:lts';
await buildCacheImageWithNode(data, imageForBuild);
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await buildCacheImageWithNode(data, baseImage);
await createDockerfile(data, baseBuildImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -13,6 +13,8 @@ import rust from './rust';
import astro from './static';
import eleventy from './static';
import python from './python';
import deno from './deno';
import laravel from './laravel';
export {
node,
@@ -29,5 +31,7 @@ export {
rust,
astro,
eleventy,
python
python,
deno,
laravel
};

View File

@@ -0,0 +1,40 @@
import { buildCacheImageForLaravel, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image): Promise<void> => {
const { workdir, applicationId, tag, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
Dockerfile.push(`COPY --chown=application:application composer.* ./`);
Dockerfile.push(`COPY --chown=application:application database/ database/`);
Dockerfile.push(
`RUN composer install --ignore-platform-reqs --no-interaction --no-plugins --no-scripts --prefer-dist`
);
Dockerfile.push(
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/js/ /app/public/js/`
);
Dockerfile.push(
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/css/ /app/public/css/`
);
Dockerfile.push(
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
);
Dockerfile.push(`COPY --chown=application:application . ./`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
const { baseImage, baseBuildImage } = data;
try {
await buildCacheImageForLaravel(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;
}
}

View File

@@ -2,13 +2,13 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
const Dockerfile: Array<string> = [];
const isPnpm = startCommand.includes('pnpm');
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
Dockerfile.push('RUN pnpm add -g pnpm');
@@ -22,11 +22,9 @@ const createDockerfile = async (data, image): Promise<void> => {
export default async function (data) {
try {
const image = 'node:lts';
const imageForBuild = 'node:lts';
await buildCacheImageWithNode(data, imageForBuild);
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -4,6 +4,7 @@ import { checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
buildId,
workdir,
port,
installCommand,
@@ -17,7 +18,7 @@ const createDockerfile = async (data, image): Promise<void> => {
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
@@ -50,8 +51,8 @@ const createDockerfile = async (data, image): Promise<void> => {
export default async function (data) {
try {
const image = 'node:lts';
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -11,14 +11,15 @@ const createDockerfile = async (data, image): Promise<void> => {
startCommand,
baseDirectory,
secrets,
pullmergeRequestId
pullmergeRequestId,
buildId
} = data;
const Dockerfile: Array<string> = [];
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
@@ -50,8 +51,8 @@ const createDockerfile = async (data, image): Promise<void> => {
export default async function (data) {
try {
const image = 'node:lts';
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -11,13 +11,14 @@ const createDockerfile = async (data, image): Promise<void> => {
startCommand,
baseDirectory,
secrets,
pullmergeRequestId
pullmergeRequestId,
buildId
} = data;
const Dockerfile: Array<string> = [];
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
@@ -49,8 +50,8 @@ const createDockerfile = async (data, image): Promise<void> => {
export default async function (data) {
try {
const image = 'node:lts';
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -2,7 +2,7 @@ import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
const { workdir, baseDirectory } = data;
const { workdir, baseDirectory, buildId, port } = data;
const Dockerfile: Array<string> = [];
let composerFound = false;
try {
@@ -11,7 +11,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
} catch (error) {}
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
if (htaccessFound) {
@@ -22,12 +22,12 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
}
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
const { workdir, baseDirectory } = data;
const { workdir, baseDirectory, baseImage } = data;
try {
let htaccessFound = false;
try {
@@ -36,10 +36,7 @@ export default async function (data) {
} catch (e) {
//
}
const image = htaccessFound
? 'webdevops/php-apache:8.0-alpine'
: 'webdevops/php-nginx:8.0-alpine';
await createDockerfile(data, image, htaccessFound);
await createDockerfile(data, baseImage, htaccessFound);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -10,12 +10,13 @@ const createDockerfile = async (data, image): Promise<void> => {
pullmergeRequestId,
pythonWSGI,
pythonModule,
pythonVariable
pythonVariable,
buildId
} = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
@@ -62,8 +63,8 @@ const createDockerfile = async (data, image): Promise<void> => {
export default async function (data) {
try {
const image = 'python:3-alpine';
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -2,24 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
Dockerfile.push(`EXPOSE 80`);
if (baseImage.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const image = 'webdevops/nginx:alpine';
const imageForBuild = 'node:lts';
await buildCacheImageWithNode(data, imageForBuild);
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -4,11 +4,11 @@ import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
const createDockerfile = async (data, image, name): Promise<void> => {
const { workdir, port, applicationId, tag } = data;
const { workdir, port, applicationId, tag, buildId } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target target`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /usr/local/cargo /usr/local/cargo`);
Dockerfile.push(`COPY . .`);
@@ -27,14 +27,12 @@ const createDockerfile = async (data, image, name): Promise<void> => {
export default async function (data) {
try {
const { workdir } = data;
const image = 'rust:latest';
const imageForBuild = 'rust:latest';
const { workdir, baseImage, baseBuildImage } = data;
const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
const parsedToml: any = TOML.parse(cargoToml);
const name = parsedToml.package.name;
await buildCacheImageWithCargo(data, imageForBuild);
await createDockerfile(data, image, name);
await buildCacheImageWithCargo(data, baseBuildImage);
await createDockerfile(data, baseImage, name);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -10,13 +10,16 @@ const createDockerfile = async (data, image): Promise<void> => {
baseDirectory,
publishDirectory,
secrets,
pullmergeRequestId
pullmergeRequestId,
baseImage,
buildId,
port
} = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
@@ -37,17 +40,18 @@ const createDockerfile = async (data, image): Promise<void> => {
} else {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
}
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
Dockerfile.push(`EXPOSE 80`);
if (baseImage.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const image = 'webdevops/nginx:alpine';
const imageForBuild = 'node:lts';
if (data.buildCommand) await buildCacheImageWithNode(data, imageForBuild);
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
if (data.buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -2,25 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
Dockerfile.push(`EXPOSE 80`);
if (baseImage.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const image = 'webdevops/nginx:alpine';
const imageForBuild = 'node:lts';
await buildCacheImageWithNode(data, imageForBuild);
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -2,24 +2,25 @@ import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
Dockerfile.push(`EXPOSE 80`);
if (baseImage.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const image = 'webdevops/nginx:alpine';
const imageForBuild = 'node:lts';
await buildCacheImageWithNode(data, imageForBuild);
await createDockerfile(data, image);
const { baseImage, baseBuildImage } = data;
await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -4,6 +4,8 @@ import { dev } from '$app/env';
import * as Sentry from '@sentry/node';
import { uniqueNamesGenerator, adjectives, colors, animals } from 'unique-names-generator';
import type { Config } from 'unique-names-generator';
import { promises as dns } from 'dns';
import { isIP } from 'is-ip';
import * as db from '$lib/database';
import { buildLogQueue } from './queues';
@@ -12,25 +14,27 @@ import { version as currentVersion } from '../../package.json';
import dayjs from 'dayjs';
import Cookie from 'cookie';
import os from 'os';
import cuid from 'cuid';
import type { RequestEvent } from '@sveltejs/kit/types/internal';
import type { Job } from 'bullmq';
import { t } from './translations';
try {
if (!dev) {
Sentry.init({
dsn: process.env['COOLIFY_SENTRY_DSN'],
tracesSampleRate: 0,
environment: 'production',
debug: true,
release: currentVersion,
initialScope: {
tags: {
appId: process.env['COOLIFY_APP_ID'],
'os.arch': os.arch(),
'os.platform': os.platform(),
'os.release': os.release()
}
}
});
// Sentry.init({
// dsn: process.env['COOLIFY_SENTRY_DSN'],
// tracesSampleRate: 0,
// environment: 'production',
// debug: true,
// release: currentVersion,
// initialScope: {
// tags: {
// appId: process.env['COOLIFY_APP_ID'],
// 'os.arch': getOsArch(),
// 'os.platform': os.platform(),
// 'os.release': os.release()
// }
// }
// });
}
} catch (err) {
console.log('Could not initialize Sentry, no worries.');
@@ -45,37 +49,30 @@ const customConfig: Config = {
export const version = currentVersion;
export const asyncExecShell = util.promisify(child.exec);
export const asyncSleep = (delay) => new Promise((resolve) => setTimeout(resolve, delay));
export const asyncSleep = (delay: number): Promise<unknown> =>
new Promise((resolve) => setTimeout(resolve, delay));
export const sentry = Sentry;
export const uniqueName = () => uniqueNamesGenerator(customConfig);
export const uniqueName = (): string => uniqueNamesGenerator(customConfig);
export const saveBuildLog = async ({ line, buildId, applicationId }) => {
if (line) {
if (line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `${generateTimestamp()} ${line}`;
return await buildLogQueue.add(buildId, { buildId, line: addTimestamp, applicationId });
export const saveBuildLog = async ({
line,
buildId,
applicationId
}: {
line: string;
buildId: string;
applicationId: string;
}): Promise<Job> => {
if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `${generateTimestamp()} ${line}`;
return await buildLogQueue.add(buildId, { buildId, line: addTimestamp, applicationId });
};
export const isTeamIdTokenAvailable = (request) => {
const cookie = request.headers.cookie
?.split(';')
.map((s) => s.trim())
.find((s) => s.startsWith('teamId='))
?.split('=')[1];
if (!cookie) {
return getTeam(request);
} else {
return cookie;
}
};
export const getTeam = (event) => {
export const getTeam = (event: RequestEvent): string | null => {
const cookies = Cookie.parse(event.request.headers.get('cookie'));
if (cookies?.teamId) {
return cookies.teamId;
@@ -85,14 +82,28 @@ export const getTeam = (event) => {
return null;
};
export const getUserDetails = async (event, isAdminRequired = true) => {
export const getUserDetails = async (
event: RequestEvent,
isAdminRequired = true
): Promise<{
teamId: string;
userId: string;
permission: string;
status: number;
body: { message: string };
}> => {
const teamId = getTeam(event);
const userId = event?.locals?.session?.data?.userId || null;
const { permission = 'read' } = await db.prisma.permission.findFirst({
where: { teamId, userId },
select: { permission: true },
rejectOnNotFound: true
});
let permission = 'read';
if (teamId && userId) {
const data = await db.prisma.permission.findFirst({
where: { teamId, userId },
select: { permission: true },
rejectOnNotFound: true
});
if (data.permission) permission = data.permission;
}
const payload = {
teamId,
userId,
@@ -112,11 +123,11 @@ export const getUserDetails = async (event, isAdminRequired = true) => {
return payload;
};
export function getEngine(engine) {
export function getEngine(engine: string): string {
return engine === '/var/run/docker.sock' ? 'unix:///var/run/docker.sock' : engine;
}
export async function removeContainer(id, engine) {
export async function removeContainer(id: string, engine: string): Promise<void> {
const host = getEngine(engine);
try {
const { stdout } = await asyncExecShell(
@@ -132,11 +143,23 @@ export async function removeContainer(id, engine) {
}
}
export const removeDestinationDocker = async ({ id, engine }) => {
export const removeDestinationDocker = async ({
id,
engine
}: {
id: string;
engine: string;
}): Promise<void> => {
return await removeContainer(id, engine);
};
export const createDirectories = async ({ repository, buildId }) => {
export const createDirectories = async ({
repository,
buildId
}: {
repository: string;
buildId: string;
}): Promise<{ workdir: string; repodir: string }> => {
const repodir = `/tmp/build-sources/${repository}/`;
const workdir = `/tmp/build-sources/${repository}/${buildId}`;
@@ -148,20 +171,108 @@ export const createDirectories = async ({ repository, buildId }) => {
};
};
export function generateTimestamp() {
export function generateTimestamp(): string {
return `${dayjs().format('HH:mm:ss.SSS')} `;
}
export function getDomain(domain) {
export function getDomain(domain: string): string {
return domain?.replace('https://', '').replace('http://', '');
}
export function dashify(str: string, options?: any): string {
if (typeof str !== 'string') return str;
return str
.trim()
.replace(/\W/g, (m) => (/[À-ž]/.test(m) ? m : '-'))
.replace(/^-+|-+$/g, '')
.replace(/-{2,}/g, (m) => (options && options.condense ? '-' : m))
.toLowerCase();
export function getOsArch() {
return os.arch();
}
export async function isDNSValid(event: any, domain: string): Promise<any> {
let resolves = [];
try {
if (isIP(event.url.hostname)) {
resolves = [event.url.hostname];
} else {
resolves = await dns.resolve4(event.url.hostname);
}
} catch (error) {
throw {
message: t.get('application.dns_not_set_error', { domain })
};
}
try {
let ipDomainFound = false;
dns.setServers(['1.1.1.1', '8.8.8.8']);
const dnsResolve = await dns.resolve4(domain);
if (dnsResolve.length > 0) {
for (const ip of dnsResolve) {
if (resolves.includes(ip)) {
ipDomainFound = true;
}
}
}
if (!ipDomainFound) throw false;
} catch (error) {
throw {
message: t.get('application.domain_not_valid')
};
}
}
export async function checkDomainsIsValidInDNS({ event, fqdn, dualCerts }): Promise<any> {
const domain = getDomain(fqdn);
const domainDualCert = domain.includes('www.') ? domain.replace('www.', '') : `www.${domain}`;
dns.setServers(['1.1.1.1', '8.8.8.8']);
let resolves = [];
try {
if (isIP(event.url.hostname)) {
resolves = [event.url.hostname];
} else {
resolves = await dns.resolve4(event.url.hostname);
}
} catch (error) {
throw {
message: t.get('application.dns_not_set_error', { domain })
};
}
if (dualCerts) {
try {
const ipDomain = await dns.resolve4(domain);
const ipDomainDualCert = await dns.resolve4(domainDualCert);
let ipDomainFound = false;
let ipDomainDualCertFound = false;
for (const ip of ipDomain) {
if (resolves.includes(ip)) {
ipDomainFound = true;
}
}
for (const ip of ipDomainDualCert) {
if (resolves.includes(ip)) {
ipDomainDualCertFound = true;
}
}
if (ipDomainFound && ipDomainDualCertFound) return { status: 200 };
throw false;
} catch (error) {
throw {
message: t.get('application.dns_not_set_error', { domain })
};
}
} else {
try {
const ipDomain = await dns.resolve4(domain);
let ipDomainFound = false;
for (const ip of ipDomain) {
if (resolves.includes(ip)) {
ipDomainFound = true;
}
}
if (ipDomainFound) return { status: 200 };
throw false;
} catch (error) {
throw {
message: t.get('application.dns_not_set_error', { domain })
};
}
}
}

View File

@@ -3,6 +3,7 @@
import Clickhouse from './svg/databases/Clickhouse.svelte';
import CouchDb from './svg/databases/CouchDB.svelte';
import MongoDb from './svg/databases/MongoDB.svelte';
import MariaDb from './svg/databases/MariaDB.svelte';
import MySql from './svg/databases/MySQL.svelte';
import PostgreSql from './svg/databases/PostgreSQL.svelte';
import Redis from './svg/databases/Redis.svelte';
@@ -17,6 +18,8 @@
<MongoDb />
{:else if database.type === 'mysql'}
<MySql />
{:else if database.type === 'mariadb'}
<MariaDb />
{:else if database.type === 'postgresql'}
<PostgreSql />
{:else if database.type === 'redis'}

View File

@@ -1,15 +1,18 @@
<script>
export let service;
import Ghost from './svg/services/Ghost.svelte';
import Hasura from './svg/services/Hasura.svelte';
import LanguageTool from './svg/services/LanguageTool.svelte';
import MinIo from './svg/services/MinIO.svelte';
import N8n from './svg/services/N8n.svelte';
import NocoDb from './svg/services/NocoDB.svelte';
import PlausibleAnalytics from './svg/services/PlausibleAnalytics.svelte';
import Umami from './svg/services/Umami.svelte';
import UptimeKuma from './svg/services/UptimeKuma.svelte';
import VaultWarden from './svg/services/VaultWarden.svelte';
import VsCodeServer from './svg/services/VSCodeServer.svelte';
import Wordpress from './svg/services/Wordpress.svelte';
import Fider from './svg/services/Fider.svelte';
</script>
{#if service.type === 'plausibleanalytics'}
@@ -52,4 +55,16 @@
<a href="https://ghost.org" target="_blank">
<Ghost />
</a>
{:else if service.type === 'umami'}
<a href="https://umami.is" target="_blank">
<Umami />
</a>
{:else if service.type === 'hasura'}
<a href="https://hasura.io" target="_blank">
<Hasura />
</a>
{:else if service.type === 'fider'}
<a href="https://fider.io" target="_blank">
<Fider />
</a>
{/if}

View File

@@ -19,7 +19,7 @@ export const staticDeployments = [
'astro',
'eleventy'
];
export const notNodeDeployments = ['php', 'docker', 'rust', 'python'];
export const notNodeDeployments = ['php', 'docker', 'rust', 'python', 'deno', 'laravel'];
export function getDomain(domain) {
return domain?.replace('https://', '').replace('http://', '');
@@ -52,6 +52,12 @@ export const supportedDatabaseTypesAndVersions = [
versions: ['5.0', '4.4', '4.2']
},
{ name: 'mysql', fancyName: 'MySQL', baseImage: 'bitnami/mysql', versions: ['8.0', '5.7'] },
{
name: 'mariadb',
fancyName: 'MariaDB',
baseImage: 'bitnami/mariadb',
versions: ['10.7', '10.6', '10.5', '10.4', '10.3', '10.2']
},
{
name: 'postgresql',
fancyName: 'PostgreSQL',
@@ -180,5 +186,46 @@ export const supportedServiceTypesAndVersions = [
ports: {
main: 7700
}
},
{
name: 'umami',
fancyName: 'Umami',
baseImage: 'ghcr.io/mikecao/umami',
images: ['postgres:12-alpine'],
versions: ['postgresql-latest'],
recommendedVersion: 'postgresql-latest',
ports: {
main: 3000
}
},
{
name: 'hasura',
fancyName: 'Hasura',
baseImage: 'hasura/graphql-engine',
images: ['postgres:12-alpine'],
versions: ['latest', 'v2.5.1'],
recommendedVersion: 'v2.5.1',
ports: {
main: 8080
}
},
{
name: 'fider',
fancyName: 'Fider',
baseImage: 'getfider/fider',
images: ['postgres:12-alpine'],
versions: ['stable'],
recommendedVersion: 'stable',
ports: {
main: 3000
}
}
];
export const getServiceMainPort = (service: string) => {
const serviceType = supportedServiceTypesAndVersions.find((s) => s.name === service);
if (serviceType) {
return serviceType.ports.main;
}
return null;
};

SVG image (6.1 KiB) — diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
<svg
class="absolute top-0 left-0 -m-4 h-10 w-10"
viewBox="0 0 50 52"
xmlns="http://www.w3.org/2000/svg"
><title>Logomark</title><path
d="M49.626 11.564a.809.809 0 0 1 .028.209v10.972a.8.8 0 0 1-.402.694l-9.209 5.302V39.25c0 .286-.152.55-.4.694L20.42 51.01c-.044.025-.092.041-.14.058-.018.006-.035.017-.054.022a.805.805 0 0 1-.41 0c-.022-.006-.042-.018-.063-.026-.044-.016-.09-.03-.132-.054L.402 39.944A.801.801 0 0 1 0 39.25V6.334c0-.072.01-.142.028-.21.006-.023.02-.044.028-.067.015-.042.029-.085.051-.124.015-.026.037-.047.055-.071.023-.032.044-.065.071-.093.023-.023.053-.04.079-.06.029-.024.055-.05.088-.069h.001l9.61-5.533a.802.802 0 0 1 .8 0l9.61 5.533h.002c.032.02.059.045.088.068.026.02.055.038.078.06.028.029.048.062.072.094.017.024.04.045.054.071.023.04.036.082.052.124.008.023.022.044.028.068a.809.809 0 0 1 .028.209v20.559l8.008-4.611v-10.51c0-.07.01-.141.028-.208.007-.024.02-.045.028-.068.016-.042.03-.085.052-.124.015-.026.037-.047.054-.071.024-.032.044-.065.072-.093.023-.023.052-.04.078-.06.03-.024.056-.05.088-.069h.001l9.611-5.533a.801.801 0 0 1 .8 0l9.61 5.533c.034.02.06.045.09.068.025.02.054.038.077.06.028.029.048.062.072.094.018.024.04.045.054.071.023.039.036.082.052.124.009.023.022.044.028.068zm-1.574 10.718v-9.124l-3.363 1.936-4.646 2.675v9.124l8.01-4.611zm-9.61 16.505v-9.13l-4.57 2.61-13.05 7.448v9.216l17.62-10.144zM1.602 7.719v31.068L19.22 48.93v-9.214l-9.204-5.209-.003-.002-.004-.002c-.031-.018-.057-.044-.086-.066-.025-.02-.054-.036-.076-.058l-.002-.003c-.026-.025-.044-.056-.066-.084-.02-.027-.044-.05-.06-.078l-.001-.003c-.018-.03-.029-.066-.042-.1-.013-.03-.03-.058-.038-.09v-.001c-.01-.038-.012-.078-.016-.117-.004-.03-.012-.06-.012-.09v-.002-21.481L4.965 9.654 1.602 7.72zm8.81-5.994L2.405 6.334l8.005 4.609 8.006-4.61-8.006-4.608zm4.164 28.764l4.645-2.674V7.719l-3.363 1.936-4.646 2.675v20.096l3.364-1.937zM39.243 7.164l-8.006 4.609 8.006 4.609 8.005-4.61-8.005-4.608zm-.801 10.605l-4.646-2.675-3.363-1.936v9.124l4.645 2.674 3.364 1.937v-9.124zM20.02 38.33l11.743-6.704 5.87-3.35-8-4.606-9.211 5.303-8.395 4.833 7.993 4.524z"
fill="#FF2D20"
fill-rule="evenodd"
/></svg
>


View File

@@ -0,0 +1,24 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<svg
xmlns="http://www.w3.org/2000/svg"
id="Layer_1"
data-name="Layer 1"
viewBox="0 0 309.88 252.72"
class={isAbsolute ? 'absolute top-0 left-0 -m-5 h-12 w-12 ' : 'mx-auto w-8 h-8'}
>
<defs>
<style>
.cls-1 {
fill: #fff;
}
</style>
</defs>
<path
class="cls-1"
d="M316,10.05a4.2,4.2,0,0,0-2.84-1c-2.84,0-6.5,1.92-8.46,3l-.79.4a26.81,26.81,0,0,1-10.57,2.66c-3.76.12-7,.34-11.22.77-25,2.58-36.15,21.74-46.89,40.27-5.84,10.08-11.88,20.5-20.16,28.57a55.71,55.71,0,0,1-5.46,4.63c-8.57,6.39-19.33,10.9-27.74,14.12-8.07,3.08-16.86,5.85-25.37,8.53-7.78,2.45-15.14,4.76-21.9,7.28-3.05,1.13-5.64,2-7.93,2.76-6.15,2-10.6,3.53-17.08,8-2.53,1.73-5.07,3.6-6.8,5a71.26,71.26,0,0,0-13.54,14.27A84.81,84.81,0,0,1,77.88,163c-1.36,1.34-3.8,2-7.43,2-4.27,0-9.43-.88-14.91-1.81s-11.46-2-16.46-2c-4.07,0-7.17.66-9.5,2,0,0-3.9,2.28-5.56,5.23l1.62.73a33.56,33.56,0,0,1,6.93,5,33.68,33.68,0,0,0,7.19,5.12A6.37,6.37,0,0,1,42,180.72c-.69,1-1.69,2.29-2.74,3.67-5.77,7.55-9.13,12.32-7.2,14.92a6,6,0,0,0,3,.68c12.59,0,19.34-3.27,27.9-7.41,2.47-1.2,5-2.44,8-3.7,5-2.17,10.38-5.63,16.08-9.29,7.55-4.85,15.36-9.87,22.92-12.3a62.3,62.3,0,0,1,19.23-2.7c8,0,16.42,1.07,24.54,2.11,6.06.78,12.32,1.58,18.47,2,2.39.14,4.6.21,6.76.21a78.48,78.48,0,0,0,8.61-.45l.68-.24c4.32-2.65,6.34-8.34,8.29-13.84,1.26-3.54,2.32-6.72,4-8.74a2.06,2.06,0,0,1,.33-.27.4.4,0,0,1,.49.08.25.25,0,0,1,0,.16c-1,21.51-9.67,35.16-18.42,47.3L177,199.14s8.18,0,12.84-1.8c17-5.08,29.84-16.28,39.18-34.14a144.39,144.39,0,0,0,6.16-14.09c.16-.4,1.64-1.14,1.49.93,0,.61-.08,1.29-.13,2h0c0,.42-.06.85-.08,1.28-.25,3-1,9.34-1,9.34l5.25-2.81c12.66-8,22.42-24.14,29.82-49.25,3.09-10.46,5.34-20.85,7.33-30,2.38-11,4.43-20.43,6.78-24.09,3.69-5.74,9.32-9.62,14.77-13.39.75-.51,1.49-1,2.22-1.54,6.86-4.81,13.67-10.36,15.16-20.71l0-.23C317.93,12.92,317,11,316,10.05Z"
transform="translate(-7.45 -9.1)"
/>
</svg>

View File

@@ -3,31 +3,13 @@
</script>
<svg
class={isAbsolute ? 'absolute top-0 left-0 -m-5 h-10 w-10' : 'mx-auto w-8 h-8'}
xmlns="http://www.w3.org/2000/svg"
class={isAbsolute ? 'absolute top-0 left-0 -m-10 h-20 w-20' : 'mx-auto w-8 h-8'}
id="Layer_1"
data-name="Layer 1"
viewBox="0 0 216.56 448.5"
><defs
><style>
.cls-1 {
fill: #10aa50;
}
.cls-2 {
fill: #b8c4c2;
}
.cls-3 {
fill: #12924f;
}
</style></defs
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 361.67651 499.33603"
><path
class="cls-1"
d="M202.8,179.68c-23-101.47-71-128.49-83.18-147.59C113,21.7,106.25,5.91,106.25,5.91c-.66,9-1.83,14.7-9.51,21.54C81.36,41.16,16,94.42,10.51,209.72c-5.12,107.5,79,173.8,90.18,180.65,8.54,4.2,19,.08,24-3.77,40.54-27.84,96-102.07,78.06-206.92"
/><path
class="cls-2"
d="M109.73,333.11c-2.11,26.62-3.63,42.11-9,57.29,0,0,3.54,25.33,6,52.17l8.77,0a488.62,488.62,0,0,1,9.57-56.2C113.71,380.8,110.16,356.46,109.73,333.11Z"
/><path
class="cls-3"
d="M125.06,386.39h0c-11.48-5.3-14.8-30.13-15.31-53.28A1090.8,1090.8,0,0,0,112.2,218.4c-.6-20.07.3-185.92-4.94-210.2,2.12,4.75,7.24,15.91,12.36,23.88,12.23,19.11,60.19,46.13,83.17,147.61C220.7,284.27,165.57,358.37,125.06,386.39Z"
/>
</svg>
d="M203.77731,148.85754c-10.8147-12.762-20.13269-25.8139-22.02478-28.49224a.426.426,0,0,0-.70032.00006c-1.89172,2.6784-11.20758,15.73022-22.02191,28.49218-92.69141,118.085,14.62982,197.75507,14.62982,197.75507l.87.60461c.8136,12.32624,2.83508,30.041,2.83508,30.041H185.442s2.01282-17.63849,2.83-29.96106l.87549-.68451S296.46774,266.94257,203.77731,148.85754ZM181.404,344.88123h-.001s-4.811-4.10383-6.10962-6.16l-.01172-.22131,5.81946-128.56055a.30281.30281,0,0,1,.605,0l5.81946,128.56036-.01135.22065C186.21652,340.77625,181.404,344.88123,181.404,344.88123Z"
fill="#00684a"
/></svg
>

View File

@@ -4,9 +4,7 @@
<svg
class={isAbsolute ? 'absolute top-0 left-0 -m-5 h-10 w-10' : 'mx-auto w-8 h-8'}
height="64"
viewBox="0 0 32 32"
width="64"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
><defs

View File

@@ -0,0 +1,121 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<svg
viewBox="0 0 700 240"
xmlns="http://www.w3.org/2000/svg"
class={isAbsolute ? 'w-36 absolute top-0 left-0 -m-3 -mt-5' : 'w-28 mx-auto'}
><path fill="#FDBC3D" d="m90.694 107.498-.981.39-20.608 8.23 6.332 6.547z" /><path
fill="#8EC63F"
d="M61.139 77.914 46.632 93 56.9 103.547c8.649-7.169 17.832-10.502 18.653-10.789L61.139 77.914z"
/><path fill="#208ECB" d="M61.139 77.914 46.367 63.247l-14.228 14.8 14.493 14.952z" /><path
fill="#273C8B"
d="m40.767 57.48-6.943 2.79a38.381 38.381 0 0 0-11.742 7.418L32.14 78.047l14.228-14.8-5.601-5.768z"
/><path
fill="#EE4649"
d="m119.074 138.128-.243-.25-5.653 5.675c1.897-1.516 4.287-3.66 5.896-5.425z"
/><path
fill="#F6944E"
d="m102.088 150.087 3.709-1.875a46.26 46.26 0 0 0 7.381-4.659l5.653-5.676-14.311-15.285-14.493 15.072 12.061 12.423z"
/><path fill="#FFC951" d="m90.279 107.926-14.842 14.74 14.589 14.998 14.493-15.072z" /><path
fill="#F6CC18"
d="m69.087 116.125-11.256 4.493c-3.301.973-6.096 2.843-8.434 5.081l11.548 11.892 14.493-14.926-6.35-6.54z"
/><path
fill="#C5D82D"
d="m56.886 103.559-10.253-10.56L32 107.926l11.784 11.991c3.304-6.888 8.174-12.272 13.103-16.358z"
/><path fill="#0D77B3" d="m32.14 78.047-14.507 14.94 14.365 14.939 14.634-14.927z" /><path
fill="#2A377E"
d="M32.14 78.047 22.08 67.688a38.573 38.573 0 0 0-11.093 18.455l6.645 6.843 14.506-14.94z"
/><path
fill="#DA2128"
d="m94.826 162.454-4.87 5.017 14.808 15.397c-.632-1.942-1.606-4.438-2.58-6.307l-7.357-14.107z"
/><path
fill="#F8A561"
d="m91.24 155.575 10.832-5.48-12.046-12.43-14.506 14.939 14.436 14.867 4.87-5.017z"
/><path fill="#FDBC3D" d="m75.437 122.665-14.493 14.926 14.576 15.013 14.506-14.94z" /><path
fill="#FAD412"
d="M49.397 125.7c-6.71 6.472-9.664 16.047-9.664 16.047-.3-4.606.06-8.83.907-12.698l-8.513 8.742 14.311 14.74 14.506-14.94-11.547-11.892z"
/><path
fill="#C4D52D"
d="m43.783 119.917-11.785-11.991-13.29 13.687 3.708 6.178 9.71 10 8.52-8.775a42.699 42.699 0 0 1 3.137-9.099z"
/><path
fill="#1B80C1"
d="m17.633 92.986-7.638 7.72c.65 5.1 2.35 10.3 5.193 15.04l3.52 5.867 13.29-13.687-14.365-14.94z"
/><path
fill="#1A4685"
d="M10.989 86.143c-1.22 4.667-1.597 9.683-.993 14.563l7.638-7.72-6.645-6.843z"
/><path
fill="#B12026"
d="m89.956 197.35 12.502 13.022c4.143-8.355 5.148-18.255 2.307-27.504l-.302-.311-14.507 14.793z"
/><path fill="#E42028" d="M89.956 167.47 75.52 182.484l14.436 14.867 14.506-14.793z" /><path
fill="#F16B4E"
d="m75.52 152.604-14.576 14.867 14.576 15.012 14.436-15.012z"
/><path fill="#FAD412" d="m60.944 137.591-14.506 14.94 14.506 14.94 14.576-14.867z" /><path
fill="#FFC951"
d="m32.127 137.792-2.293 2.36 10.933 18.22 5.671-5.841z"
/><path fill="#FFC951" d="m22.416 127.79 7.418 12.363 2.293-2.361z" /><path
fill="#981C20"
d="M102.458 210.371 89.955 197.35 75.45 212.29l12.918 13.304a36.951 36.951 0 0 0 14.09-15.222z"
/><path
fill="#C92039"
d="m75.52 182.483-12.59 12.823 6.423 10.704 6.097 6.28 14.506-14.94z"
/><path fill="#F05B41" d="m60.944 167.47-9.096 9.369 11.081 18.467 12.59-12.823z" /><path
fill="#F6CC18"
d="m46.438 152.53-5.671 5.842 11.081 18.467 9.096-9.368z"
/><path
fill="#7A1319"
d="m74.01 213.772 8.904 14.838 4.104-2.237c.429-.233.934-.533 1.35-.78L75.45 212.29l-1.44 1.482z"
/><path fill="#981C20" d="m69.353 206.01 4.658 7.762 1.44-1.482z" /><path
fill="#15796E"
d="m147.842 48.094 10.653-10.971a41.81 41.81 0 0 0 .943-6.94l-11.414-11.755-14.48 14.94 14.298 14.726z"
/><path fill="#29B364" d="m133.53 33.354 14.494-14.926-2.737-2.965-20.95 8.422z" /><path
fill="#21A29F"
d="M151.819 52.189c3.057-4.334 5.434-9.932 6.677-15.066l-10.653 10.971 3.976 4.095z"
/><path
fill="#12827F"
d="M159.438 30.183c.307-6.28-.783-12.862-3.488-19.006l-1.41.567-6.516 6.684 11.414 11.755zM154.54 11.744l-9.253 3.72 2.737 2.964z"
/><path fill="#0C6355" d="m133.336 63.034 14.506-14.94-14.311-14.713-14.493 14.926z" /><path
fill="#1B974D"
d="m104.532 33.368 14.506 14.94 14.48-14.94-9.2-9.476-17.363 6.98z"
/><path fill="#16669F" d="m106.955 30.872-3.485 1.401 1.062 1.095z" /><path
fill="#44BFBD"
d="M135.9 65.674A41.696 41.696 0 0 0 151.82 52.19l-3.977-4.095-14.506 14.94 2.564 2.64z"
/><path
fill="#0D5650"
d="m115.71 74.76 11.052-4.956 6.574-6.77-14.298-14.727-14.506 14.94z"
/><path fill="#3FAF49" d="m119.038 48.307-14.506-14.94-14.576 14.868 14.563 14.999z" /><path
fill="#0D77B3"
d="m104.532 33.368-1.062-1.095-20.97 8.43 7.456 7.532z"
/><path
fill="#0C6355"
d="M134.766 66.217c.352-.157.789-.376 1.134-.543l-2.564-2.64-6.574 6.77 8.004-3.587z"
/><path fill="#12827F" d="m115.71 74.76-11.178-11.513-14.506 14.94 5.47 5.633z" /><path
fill="#4EB648"
d="M104.532 63.247 89.956 48.235 75.52 63.247l14.493 14.927z"
/><path fill="#16669F" d="M89.956 48.235 82.5 40.703l-20.868 8.388L75.52 63.247z" /><path
fill="#FBB139"
d="M129.526 119.012c1.902-7.144 2.108-15.019.353-22.538l-11.048 11.379 10.695 11.16z"
/><path
fill="#E2B523"
d="m110.62 99.542 8.21 8.311 11.049-11.38a46.303 46.303 0 0 0-1.186-4.149l-18.074 7.218z"
/><path fill="#189590" d="M90.026 78.186 76.128 92.501l19.367-8.681z" /><path
fill="#8EC63F"
d="m76.083 92.521 13.943-14.335-14.506-14.94-14.381 14.668 14.413 14.844z"
/><path
fill="#0D77B3"
d="M75.52 63.247 61.633 49.09l-2.264.91-13.002 13.246L61.14 77.914z"
/><path fill="#1953A2" d="m59.37 50.002-18.603 7.477 5.6 5.768z" /><path
fill="#ED3551"
d="M119.324 137.84c.885-.988 2.15-2.59 2.942-3.646l-3.17 3.41.228.236z"
/><path
fill="#F8A561"
d="m118.83 137.877 3.437-3.683a46.268 46.268 0 0 0 7.259-15.182l-10.695-11.159-14.311 14.74 14.31 15.284z"
/><path
fill="#E9B520"
d="m90.279 107.926 14.24 14.666 14.312-14.739-8.212-8.311-19.925 7.956z"
/><path
fill="#EE4649"
d="m118.83 137.877.244.251c.085-.095.166-.193.25-.288l-.228-.235-.265.272z"
/></svg
>

View File

@@ -0,0 +1,26 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<svg
class={isAbsolute ? 'w-10 h-10 absolute top-0 left-0 -m-5' : 'w-8 mx-auto'}
viewBox="0 0 81 84"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<g clip-path="url(#clip0_5273_21928)">
<path
d="M79.7186 28.6019C82.1218 21.073 80.6778 6.03601 76.0158 0.487861C75.4073 -0.238064 74.2624 -0.134361 73.757 0.664158L68.0121 9.72786C66.5887 11.5427 64.0308 11.9575 62.1124 10.6923C55.8827 6.59601 48.4359 4.21082 40.4322 4.21082C32.4285 4.21082 24.9817 6.59601 18.752 10.6923C16.8336 11.9575 14.2757 11.5323 12.8523 9.72786L7.10738 0.664158C6.60199 -0.134361 5.45712 -0.238064 4.84859 0.487861C0.186621 6.03601 -1.25735 21.073 1.14583 28.6019C1.94002 31.1012 2.16693 33.7456 1.69248 36.3279C1.22834 38.879 0.753897 41.9693 0.753897 44.1056C0.753897 66.1323 18.5251 84.0004 40.4322 84.0004C62.3497 84.0004 80.1105 66.1427 80.1105 44.1056C80.1105 41.959 79.6464 38.879 79.1719 36.3279C78.6975 33.7456 78.9244 31.1012 79.7186 28.6019ZM40.4322 75.0819C23.4965 75.0819 9.71684 61.2271 9.71684 44.199C9.71684 43.639 9.73747 43.0893 9.7581 42.5397C10.3769 30.9353 17.3802 21.0108 27.3024 16.2819C31.2836 14.3738 35.7393 13.316 40.4322 13.316C45.1251 13.316 49.5808 14.3842 53.5724 16.2923C63.4945 21.0212 70.4978 30.9456 71.1166 42.5397C71.1476 43.0893 71.1579 43.639 71.1579 44.199C71.1476 61.2271 57.3679 75.0819 40.4322 75.0819Z"
fill="#1EB4D4"
/>
<path
d="M53.7371 56.083L45.8881 42.4044L39.153 30.997C38.9983 30.7274 38.7095 30.5615 38.3898 30.5615H31.9538C31.634 30.5615 31.3452 30.7378 31.1905 31.0074C31.0358 31.2874 31.0358 31.6296 31.2008 31.8993L37.6368 42.7881L28.9936 56.0415C28.8183 56.3111 28.7977 56.6637 28.9524 56.9541C29.1071 57.2444 29.4062 57.4207 29.7259 57.4207H36.2032C36.5023 57.4207 36.7808 57.2652 36.9458 57.0163L41.6181 49.6741L45.8056 56.9748C45.9603 57.2548 46.2594 57.4207 46.5688 57.4207H52.9533C53.273 57.4207 53.5618 57.2548 53.7165 56.9748C53.9022 56.6948 53.9022 56.363 53.7371 56.083Z"
fill="#1EB4D4"
/>
</g>
<defs>
<clipPath id="clip0_5273_21928">
<rect width="81" height="84" fill="white" />
</clipPath>
</defs>
</svg>

View File

@@ -0,0 +1,83 @@
<script lang="ts">
export let isAbsolute = false;
</script>
<svg
version="1.0"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 856.000000 856.000000"
preserveAspectRatio="xMidYMid meet"
class={isAbsolute ? 'w-10 h-10 absolute top-0 left-0 -m-5' : 'w-8 mx-auto'}
>
<metadata> Created by potrace 1.11, written by Peter Selinger 2001-2013 </metadata>
<g
transform="translate(0.000000,856.000000) scale(0.100000,-0.100000)"
fill="currentColor"
stroke="none"
>
<path
d="M4027 8163 c-2 -2 -28 -5 -58 -7 -50 -4 -94 -9 -179 -22 -19 -2 -48
-6 -65 -9 -47 -6 -236 -44 -280 -55 -22 -6 -49 -12 -60 -15 -34 -6 -58 -13
-130 -36 -38 -13 -72 -23 -75 -24 -29 -6 -194 -66 -264 -96 -49 -22 -95 -39
-102 -39 -7 0 -19 -7 -28 -15 -8 -9 -18 -15 -21 -14 -7 1 -197 -92 -205 -101
-3 -3 -21 -13 -40 -24 -79 -42 -123 -69 -226 -137 -94 -62 -246 -173 -280
-204 -6 -5 -29 -25 -52 -43 -136 -111 -329 -305 -457 -462 -21 -25 -41 -47
-44 -50 -4 -3 -22 -26 -39 -52 -18 -25 -38 -52 -45 -60 -34 -35 -207 -308
-259 -408 -13 -25 -25 -47 -28 -50 -11 -11 -121 -250 -159 -346 -42 -105 -114
-321 -126 -374 l-7 -30 -263 0 c-245 0 -268 -2 -321 -21 -94 -35 -171 -122
-191 -216 -9 -39 -8 -852 0 -938 9 -87 16 -150 23 -195 3 -19 6 -48 8 -65 3
-29 14 -97 22 -140 3 -11 7 -36 10 -55 3 -19 9 -51 14 -70 5 -19 11 -46 14
-60 29 -138 104 -401 145 -505 5 -11 23 -58 42 -105 18 -47 42 -105 52 -130
11 -25 21 -49 22 -55 3 -10 109 -224 164 -330 18 -33 50 -89 71 -124 22 -34
40 -64 40 -66 0 -8 104 -161 114 -167 6 -4 7 -8 3 -8 -4 0 4 -12 18 -27 14
-15 25 -32 25 -36 0 -5 6 -14 13 -21 6 -7 21 -25 32 -41 11 -15 34 -44 50 -64
17 -21 41 -52 55 -70 13 -18 33 -43 45 -56 11 -13 42 -49 70 -81 100 -118 359
-369 483 -469 34 -27 62 -53 62 -57 0 -5 6 -8 13 -8 7 0 19 -9 27 -20 8 -11
19 -20 26 -20 6 0 19 -9 29 -20 10 -11 22 -20 27 -20 5 0 23 -13 41 -30 18
-16 37 -30 44 -30 6 0 13 -4 15 -8 3 -8 186 -132 194 -132 2 0 27 -15 56 -34
132 -83 377 -207 558 -280 36 -15 74 -31 85 -36 62 -26 220 -81 320 -109 79
-23 191 -53 214 -57 14 -3 28 -7 31 -9 4 -2 20 -7 36 -9 16 -3 40 -8 54 -11
14 -3 36 -8 50 -11 14 -2 36 -7 50 -10 13 -3 40 -8 60 -10 19 -2 46 -7 60 -10
54 -10 171 -25 320 -40 90 -9 613 -12 636 -4 11 5 28 4 37 -1 9 -6 17 -6 17
-1 0 4 10 8 23 9 29 0 154 12 192 18 17 3 46 7 65 9 70 10 131 20 183 32 16 3
38 7 50 9 45 7 165 36 252 60 50 14 100 28 112 30 12 3 34 10 48 15 14 5 25 7
25 4 0 -4 6 -2 13 3 6 6 30 16 52 22 22 7 47 15 55 18 8 4 17 7 20 7 10 2 179
68 240 94 96 40 342 159 395 191 17 10 53 30 80 46 28 15 81 47 118 71 37 24
72 44 76 44 5 0 11 3 13 8 2 4 30 25 63 47 33 22 62 42 65 45 3 3 50 38 105
79 55 40 105 79 110 85 6 6 24 22 40 34 85 65 465 430 465 447 0 3 8 13 18 23
9 10 35 40 57 66 22 27 47 56 55 65 8 9 42 52 74 96 32 44 71 96 85 115 140
183 358 576 461 830 12 30 28 69 36 85 24 56 123 355 117 355 -3 0 -1 6 5 13
6 6 14 30 18 52 10 48 9 46 17 65 5 13 37 155 52 230 9 42 35 195 40 231 34
235 40 357 40 804 l0 420 -24 44 c-46 87 -143 157 -231 166 -19 2 -144 4 -276
4 l-242 1 -36 118 c-21 64 -46 139 -56 166 -11 27 -20 52 -20 57 0 5 -11 33
-25 63 -14 30 -25 58 -25 61 0 18 -152 329 -162 333 -5 2 -8 10 -8 18 0 8 -4
14 -10 14 -5 0 -9 3 -8 8 3 9 -40 82 -128 217 -63 97 -98 145 -187 259 -133
171 -380 420 -559 564 -71 56 -132 102 -138 102 -5 0 -10 3 -10 8 0 4 -25 23
-55 42 -30 19 -55 38 -55 43 0 4 -6 7 -13 7 -7 0 -22 8 -33 18 -11 9 -37 26
-59 37 -21 11 -44 25 -50 30 -41 37 -413 220 -540 266 -27 9 -61 22 -75 27
-14 5 -28 10 -32 11 -4 1 -28 10 -53 21 -25 11 -46 19 -48 18 -2 -1 -109 29
-137 40 -13 4 -32 9 -65 16 -5 1 -16 5 -22 9 -7 5 -13 6 -13 3 0 -2 -15 0 -32
5 -18 5 -44 11 -58 14 -14 3 -36 7 -50 10 -14 3 -50 9 -80 15 -30 6 -64 12
-75 14 -11 2 -45 6 -75 10 -30 4 -71 9 -90 12 -19 3 -53 6 -75 7 -22 1 -44 5
-50 8 -11 7 -542 9 -548 2z m57 -404 c7 10 436 8 511 -3 22 -3 60 -8 85 -11
25 -2 56 -6 70 -9 14 -2 43 -7 65 -10 38 -5 58 -9 115 -21 14 -3 34 -7 45 -9
11 -2 58 -14 105 -26 47 -12 92 -23 100 -25 35 -7 279 -94 308 -109 17 -9 34
-16 37 -16 3 1 20 -6 38 -14 17 -8 68 -31 112 -51 44 -20 82 -35 84 -35 2 1 7
-3 10 -8 3 -5 43 -28 88 -51 45 -23 87 -48 93 -56 7 -8 17 -15 22 -15 12 0
192 -121 196 -132 2 -4 8 -8 13 -8 10 0 119 -86 220 -172 102 -87 256 -244
349 -357 25 -30 53 -63 63 -73 9 -10 17 -22 17 -28 0 -5 3 -10 8 -10 4 0 25
-27 46 -60 22 -33 43 -60 48 -60 4 0 8 -5 8 -11 0 -6 11 -25 25 -43 14 -18 25
-38 25 -44 0 -7 4 -12 8 -12 5 0 16 -15 25 -32 9 -18 30 -55 47 -83 46 -77
161 -305 154 -305 -4 0 -2 -6 4 -12 6 -7 23 -47 40 -88 16 -41 33 -84 37 -95
5 -11 9 -22 10 -25 0 -3 11 -36 24 -73 13 -38 21 -70 19 -73 -3 -2 -1386 -3
-3075 -2 l-3071 3 38 110 c47 137 117 301 182 425 62 118 167 295 191 320 9
11 17 22 17 25 0 7 39 63 58 83 6 7 26 35 44 60 18 26 37 52 43 57 6 6 34 37
61 70 48 59 271 286 329 335 17 14 53 43 80 65 28 22 52 42 55 45 3 3 21 17
40 30 19 14 40 28 45 32 40 32 105 78 109 78 3 0 28 16 55 35 26 19 53 35 58
35 5 0 18 8 29 18 17 15 53 35 216 119 118 60 412 176 422 166 3 -4 6 -2 6 4
0 6 12 13 28 16 15 3 52 12 82 21 30 9 63 19 73 21 10 2 27 7 37 10 10 3 29 8
42 10 13 3 48 10 78 16 30 7 61 12 68 12 6 0 12 4 12 9 0 5 5 6 10 3 6 -4 34
-2 63 4 51 11 71 13 197 26 36 4 67 9 69 11 2 2 10 -1 17 -7 8 -6 14 -7 18 0z"
/>
</g>
</svg>

View File

@@ -15,7 +15,6 @@ export function findBuildPack(pack, packageManager = 'npm') {
...metaData,
...defaultBuildAndDeploy(packageManager),
buildCommand: null,
startCommand: null,
publishDirectory: null,
port: null
};
@@ -153,6 +152,26 @@ export function findBuildPack(pack, packageManager = 'npm') {
port: 8000
};
}
if (pack === 'deno') {
return {
...metaData,
installCommand: null,
buildCommand: null,
startCommand: null,
publishDirectory: null,
port: 8000
};
}
if (pack === 'laravel') {
return {
...metaData,
installCommand: null,
buildCommand: null,
startCommand: null,
publishDirectory: null,
port: 80
};
}
return {
name: 'node',
fancyName: 'Node.js',
@@ -178,18 +197,25 @@ export const buildPacks = [
hoverColor: 'hover:bg-orange-700',
color: 'bg-orange-700'
},
{
name: 'docker',
fancyName: 'Docker',
hoverColor: 'hover:bg-sky-700',
color: 'bg-sky-700'
},
{
name: 'php',
fancyName: 'PHP',
hoverColor: 'hover:bg-indigo-700',
color: 'bg-indigo-700'
},
{
name: 'laravel',
fancyName: 'Laravel',
hoverColor: 'hover:bg-indigo-700',
color: 'bg-indigo-700'
},
{
name: 'docker',
fancyName: 'Docker',
hoverColor: 'hover:bg-sky-700',
color: 'bg-sky-700'
},
{
name: 'svelte',
fancyName: 'Svelte',
@@ -262,6 +288,12 @@ export const buildPacks = [
fancyName: 'Python',
hoverColor: 'hover:bg-green-700',
color: 'bg-green-700'
},
{
name: 'deno',
fancyName: 'Deno',
hoverColor: 'hover:bg-green-700',
color: 'bg-green-700'
}
];
export const scanningTemplates = {

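A quick usage sketch for the build-pack changes above: the new 'deno' and 'laravel' entries in findBuildPack and the buildPacks array follow the same shape as the existing packs, so callers only pass the pack name. This is illustrative only and assumes this hunk is the '$lib/buildPacks/common' module imported elsewhere in this diff:

import { findBuildPack, buildPacks } from '$lib/buildPacks/common';

// Defaults applied for a freshly detected Deno app (port 8000 per the diff above).
const denoDefaults = findBuildPack('deno');
console.log(denoDefaults.port); // 8000

// Laravel falls back to the conventional HTTP port.
const laravelDefaults = findBuildPack('laravel');
console.log(laravelDefaults.port); // 80

// buildPacks drives the build-pack selector; each entry carries a name, a display name and Tailwind classes.
console.log(buildPacks.map((p) => p.name)); // now includes 'laravel' and 'deno'
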
View File

@@ -1,13 +1,13 @@
import crypto from 'crypto';
const algorithm = 'aes-256-ctr';
export const base64Encode = (text: string) => {
export const base64Encode = (text: string): string => {
return Buffer.from(text).toString('base64');
};
export const base64Decode = (text: string) => {
export const base64Decode = (text: string): string => {
return Buffer.from(text, 'base64').toString('ascii');
};
export const encrypt = (text: string) => {
export const encrypt = (text: string): string => {
if (text) {
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, process.env['COOLIFY_SECRET_KEY'], iv);
@@ -19,7 +19,7 @@ export const encrypt = (text: string) => {
}
};
export const decrypt = (hashString: string) => {
export const decrypt = (hashString: string): string => {
if (hashString) {
const hash: Hash = JSON.parse(hashString);
const decipher = crypto.createDecipheriv(

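A standalone sketch of the encrypt/decrypt round trip whose signatures are tightened above. The Hash shape ({ iv, content }) and the hex encoding are assumptions based on how decrypt() parses hashString; aes-256-ctr needs a 32-byte key, so a 32-character constant stands in for COOLIFY_SECRET_KEY here:

import crypto from 'crypto';

const algorithm = 'aes-256-ctr';
const key = 'an-example-32-character-secret!!'; // stand-in for process.env['COOLIFY_SECRET_KEY']

const encrypt = (text: string): string => {
	const iv = crypto.randomBytes(16);
	const cipher = crypto.createCipheriv(algorithm, key, iv);
	const content = Buffer.concat([cipher.update(text), cipher.final()]);
	// Assumed Hash shape: the IV travels with the ciphertext so decrypt can rebuild the cipher.
	return JSON.stringify({ iv: iv.toString('hex'), content: content.toString('hex') });
};

const decrypt = (hashString: string): string => {
	const hash = JSON.parse(hashString);
	const decipher = crypto.createDecipheriv(algorithm, key, Buffer.from(hash.iv, 'hex'));
	return Buffer.concat([decipher.update(Buffer.from(hash.content, 'hex')), decipher.final()]).toString();
};

console.log(decrypt(encrypt('hello')) === 'hello'); // true
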
View File

@@ -1,10 +1,20 @@
import { decrypt, encrypt } from '$lib/crypto';
import { asyncExecShell, getEngine } from '$lib/common';
import { getDomain, removeDestinationDocker } from '$lib/common';
import { removeDestinationDocker } from '$lib/common';
import { prisma } from './common';
export async function listApplications(teamId) {
import type {
DestinationDocker,
GitSource,
Secret,
ApplicationSettings,
Application,
ApplicationPersistentStorage
} from '@prisma/client';
import { setDefaultBaseImage } from '$lib/buildPacks/common';
export async function listApplications(teamId: string): Promise<Application[]> {
if (teamId === '0') {
return await prisma.application.findMany({ include: { teams: true } });
}
@@ -14,7 +24,13 @@ export async function listApplications(teamId) {
});
}
export async function newApplication({ name, teamId }) {
export async function newApplication({
name,
teamId
}: {
name: string;
teamId: string;
}): Promise<Application> {
return await prisma.application.create({
data: {
name,
@@ -24,34 +40,17 @@ export async function newApplication({ name, teamId }) {
});
}
export async function importApplication({
name,
teamId,
fqdn,
port,
buildCommand,
startCommand,
installCommand
}) {
return await prisma.application.create({
data: {
name,
fqdn,
port,
buildCommand,
startCommand,
installCommand,
teams: { connect: { id: teamId } }
}
});
}
export async function removeApplication({ id, teamId }) {
const { fqdn, destinationDockerId, destinationDocker } = await prisma.application.findUnique({
export async function removeApplication({
id,
teamId
}: {
id: string;
teamId: string;
}): Promise<void> {
const { destinationDockerId, destinationDocker } = await prisma.application.findUnique({
where: { id },
include: { destinationDocker: true }
});
const domain = getDomain(fqdn);
if (destinationDockerId) {
const host = getEngine(destinationDocker.engine);
const { stdout: containers } = await asyncExecShell(
@@ -62,7 +61,6 @@ export async function removeApplication({ id, teamId }) {
for (const container of containersArray) {
const containerObj = JSON.parse(container);
const id = containerObj.ID;
const preview = containerObj.Image.split('-')[1];
await removeDestinationDocker({ id, engine: destinationDocker.engine });
}
}
@@ -80,9 +78,23 @@ export async function removeApplication({ id, teamId }) {
}
}
export async function getApplicationWebhook({ projectId, branch }) {
export async function getApplicationWebhook({
projectId,
branch
}: {
projectId: number;
branch: string;
}): Promise<
Application & {
destinationDocker: DestinationDocker;
settings: ApplicationSettings;
gitSource: GitSource;
secrets: Secret[];
persistentStorage: ApplicationPersistentStorage[];
}
> {
try {
let application = await prisma.application.findFirst({
const application = await prisma.application.findFirst({
where: { projectId, branch, settings: { autodeploy: true } },
include: {
destinationDocker: true,
@@ -126,21 +138,33 @@ export async function getApplicationWebhook({ projectId, branch }) {
return s;
});
}
return { ...application };
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
application.buildPack
);
// Set default build images
if (!application.baseImage) {
application.baseImage = baseImage;
}
if (!application.baseBuildImage) {
application.baseBuildImage = baseBuildImage;
}
return { ...application, baseBuildImages, baseImages };
} catch (e) {
throw { status: 404, body: { message: e.message } };
}
}
export async function getApplicationById({ id }) {
const body = await prisma.application.findFirst({
where: { id },
include: { destinationDocker: true }
});
return { ...body };
}
export async function getApplication({ id, teamId }) {
let body = {};
export async function getApplication({ id, teamId }: { id: string; teamId: string }): Promise<
Application & {
destinationDocker: DestinationDocker;
settings: ApplicationSettings;
gitSource: GitSource;
secrets: Secret[];
persistentStorage: ApplicationPersistentStorage[];
}
> {
let body;
if (teamId === '0') {
body = await prisma.application.findFirst({
where: { id },
@@ -183,8 +207,18 @@ export async function getApplication({ id, teamId }) {
return s;
});
}
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
body.buildPack
);
return { ...body };
// Set default build images
if (!body.baseImage) {
body.baseImage = baseImage;
}
if (!body.baseBuildImage) {
body.baseBuildImage = baseBuildImage;
}
return { ...body, baseBuildImages, baseImages };
}
export async function configureGitRepository({
@@ -194,7 +228,14 @@ export async function configureGitRepository({
projectId,
webhookToken,
autodeploy
}) {
}: {
id: string;
repository: string;
branch: string;
projectId: number;
webhookToken: string;
autodeploy: boolean;
}): Promise<void> {
if (webhookToken) {
const encryptedWebhookToken = encrypt(webhookToken);
await prisma.application.update({
@@ -224,7 +265,10 @@ export async function configureGitRepository({
}
}
export async function configureBuildPack({ id, buildPack }) {
export async function configureBuildPack({
id,
buildPack
}: Pick<Application, 'id' | 'buildPack'>): Promise<Application> {
return await prisma.application.update({ where: { id }, data: { buildPack } });
}
@@ -234,6 +278,7 @@ export async function configureApplication({
name,
fqdn,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
@@ -241,8 +286,33 @@ export async function configureApplication({
publishDirectory,
pythonWSGI,
pythonModule,
pythonVariable
}) {
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage
}: {
id: string;
buildPack: string;
name: string;
fqdn: string;
port: number;
exposePort: number;
installCommand: string;
buildCommand: string;
startCommand: string;
baseDirectory: string;
publishDirectory: string;
pythonWSGI: string;
pythonModule: string;
pythonVariable: string;
dockerFileLocation: string;
denoMainFile: string;
denoOptions: string;
baseImage: string;
baseBuildImage: string;
}): Promise<Application> {
return await prisma.application.update({
where: { id },
data: {
@@ -250,6 +320,7 @@ export async function configureApplication({
buildPack,
fqdn,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
@@ -257,16 +328,34 @@ export async function configureApplication({
publishDirectory,
pythonWSGI,
pythonModule,
pythonVariable
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage
}
});
}
export async function checkDoubleBranch(branch, projectId) {
export async function checkDoubleBranch(branch: string, projectId: number): Promise<boolean> {
const applications = await prisma.application.findMany({ where: { branch, projectId } });
return applications.length > 1;
}
export async function setApplicationSettings({ id, debug, previews, dualCerts, autodeploy }) {
export async function setApplicationSettings({
id,
debug,
previews,
dualCerts,
autodeploy
}: {
id: string;
debug: boolean;
previews: boolean;
dualCerts: boolean;
autodeploy: boolean;
}): Promise<Application & { destinationDocker: DestinationDocker }> {
return await prisma.application.update({
where: { id },
data: { settings: { update: { debug, previews, dualCerts, autodeploy } } },
@@ -274,29 +363,6 @@ export async function setApplicationSettings({ id, debug, previews, dualCerts, a
});
}
export async function createBuild({
id,
applicationId,
destinationDockerId,
gitSourceId,
githubAppId,
gitlabAppId,
type
}) {
return await prisma.build.create({
data: {
id,
applicationId,
destinationDockerId,
gitSourceId,
githubAppId,
gitlabAppId,
status: 'running',
type
}
});
}
export async function getPersistentStorage(id) {
export async function getPersistentStorage(id: string): Promise<ApplicationPersistentStorage[]> {
return await prisma.applicationPersistentStorage.findMany({ where: { applicationId: id } });
}
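
The base-image handling added to getApplication and getApplicationWebhook above follows one pattern: ask setDefaultBaseImage for the defaults that match the build pack, fill them in only when the stored record has none, and return the full choice lists (presumably for the image selectors in the UI). A minimal sketch of that pattern; the generic wrapper is illustrative, not part of the diff:

import { setDefaultBaseImage } from '$lib/buildPacks/common';

function withImageDefaults<T extends { buildPack: string; baseImage?: string; baseBuildImage?: string }>(
	application: T
) {
	const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
		application.buildPack
	);
	// Only fill in defaults when the user has not picked an image yet.
	if (!application.baseImage) application.baseImage = baseImage;
	if (!application.baseBuildImage) application.baseBuildImage = baseBuildImage;
	// The available choices ride along with the record.
	return { ...application, baseBuildImages, baseImages };
}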

View File

@@ -1,7 +1,16 @@
import { getDomain } from '$lib/common';
import { prisma } from './common';
import type { Application, ServiceSecret, DestinationDocker, Secret } from '@prisma/client';
export async function isBranchAlreadyUsed({ repository, branch, id }) {
export async function isBranchAlreadyUsed({
repository,
branch,
id
}: {
id: string;
repository: string;
branch: string;
}): Promise<Application> {
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: true }
@@ -11,18 +20,42 @@ export async function isBranchAlreadyUsed({ repository, branch, id }) {
});
}
export async function isDockerNetworkExists({ network }) {
export async function isDockerNetworkExists({
network
}: {
network: string;
}): Promise<DestinationDocker> {
return await prisma.destinationDocker.findFirst({ where: { network } });
}
export async function isServiceSecretExists({ id, name }) {
export async function isServiceSecretExists({
id,
name
}: {
id: string;
name: string;
}): Promise<ServiceSecret> {
return await prisma.serviceSecret.findFirst({ where: { name, serviceId: id } });
}
export async function isSecretExists({ id, name, isPRMRSecret }) {
export async function isSecretExists({
id,
name,
isPRMRSecret
}: {
id: string;
name: string;
isPRMRSecret: boolean;
}): Promise<Secret> {
return await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } });
}
export async function isDomainConfigured({ id, fqdn }) {
export async function isDomainConfigured({
id,
fqdn
}: {
id: string;
fqdn: string;
}): Promise<boolean> {
const domain = getDomain(fqdn);
const nakedDomain = domain.replace('www.', '');
const foundApp = await prisma.application.findFirst({
@@ -55,6 +88,5 @@ export async function isDomainConfigured({ id, fqdn }) {
},
select: { fqdn: true }
});
if (foundApp || foundService || coolifyFqdn) return true;
return false;
return !!(foundApp || foundService || coolifyFqdn);
}
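
isDomainConfigured above answers one question: is this FQDN (or its www/naked variant) already taken by another application, a service, or the Coolify instance itself? A hedged usage sketch; the import path and ids are made up, and the error shape copies the { status, body } convention used elsewhere in this diff:

import { isDomainConfigured } from '$lib/database/checks'; // path assumed

const alreadyUsed = await isDomainConfigured({ id: 'app_123', fqdn: 'https://example.com' });
if (alreadyUsed) {
	throw { status: 500, body: { message: 'Domain example.com is already configured.' } };
}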

View File

@@ -6,15 +6,17 @@ import {
} from '$lib/components/common';
import * as Prisma from '@prisma/client';
import { default as ProdPrisma } from '@prisma/client';
import type { PrismaClientOptions } from '@prisma/client/runtime';
import type { Database, DatabaseSettings } from '@prisma/client';
import generator from 'generate-password';
import forge from 'node-forge';
import getPort, { portNumbers } from 'get-port';
export function generatePassword(length = 24) {
export function generatePassword(length = 24, symbols = false): string {
return generator.generate({
length,
numbers: true,
strict: true
strict: true,
symbols
});
}
@@ -30,8 +32,14 @@ export const prisma = new PrismaClient({
rejectOnNotFound: false
});
export function ErrorHandler(e) {
if (e! instanceof Error) {
export function ErrorHandler(e: {
stdout?;
message?: string;
status?: number;
name?: string;
error?: string;
}): { status: number; body: { message: string; error: string } } {
if (e && e instanceof Error) {
e = new Error(e.toString());
}
let truncatedError = e;
@@ -39,8 +47,7 @@ export function ErrorHandler(e) {
truncatedError = e.stdout;
}
if (e.message?.includes('docker run')) {
let truncatedArray = [];
truncatedArray = truncatedError.message.split('-').filter((line) => {
const truncatedArray: string[] = truncatedError.message.split('-').filter((line) => {
if (!line.startsWith('e ')) {
return line;
}
@@ -51,7 +58,7 @@ export function ErrorHandler(e) {
truncatedError.message = 'git clone failed';
}
if (!e.message?.includes('Coolify Proxy is not running')) {
sentry.captureException(truncatedError);
// sentry.captureException(truncatedError);
}
const payload = {
status: truncatedError.status || 500,
@@ -68,11 +75,11 @@ export function ErrorHandler(e) {
payload.body.message = 'Already exists. Choose another name.';
}
}
// console.error(e)
return payload;
}
export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> {
return await new Promise(async (resolve, reject) => {
return await new Promise((resolve, reject) => {
forge.pki.rsa.generateKeyPair({ bits: 4096, workers: -1 }, function (err, keys) {
if (keys) {
resolve({
@@ -86,35 +93,107 @@ export async function generateSshKeyPair(): Promise<{ publicKey: string; private
});
}
export function getVersions(type) {
export function getVersions(type: string): string[] {
const found = supportedDatabaseTypesAndVersions.find((t) => t.name === type);
if (found) {
return found.versions;
}
return [];
}
export function getDatabaseImage(type) {
export function getDatabaseImage(type: string): string {
const found = supportedDatabaseTypesAndVersions.find((t) => t.name === type);
if (found) {
return found.baseImage;
}
return '';
}
export function getServiceImage(type) {
export function getServiceImage(type: string): string {
const found = supportedServiceTypesAndVersions.find((t) => t.name === type);
if (found) {
return found.baseImage;
}
return '';
}
export function getServiceImages(type) {
export function getServiceImages(type: string): string[] {
const found = supportedServiceTypesAndVersions.find((t) => t.name === type);
if (found) {
return found.images;
}
return [];
}
export function generateDatabaseConfiguration(database) {
export function generateDatabaseConfiguration(database: Database & { settings: DatabaseSettings }):
| {
volume: string;
image: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
MYSQL_DATABASE: string;
MYSQL_PASSWORD: string;
MYSQL_ROOT_USER: string;
MYSQL_USER: string;
MYSQL_ROOT_PASSWORD: string;
};
}
| {
volume: string;
image: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
MONGODB_ROOT_USER: string;
MONGODB_ROOT_PASSWORD: string;
};
}
| {
volume: string;
image: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
MARIADB_ROOT_USER: string;
MARIADB_ROOT_PASSWORD: string;
MARIADB_USER: string;
MARIADB_PASSWORD: string;
MARIADB_DATABASE: string;
};
}
| {
volume: string;
image: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
POSTGRESQL_POSTGRES_PASSWORD: string;
POSTGRESQL_USERNAME: string;
POSTGRESQL_PASSWORD: string;
POSTGRESQL_DATABASE: string;
};
}
| {
volume: string;
image: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
REDIS_AOF_ENABLED: string;
REDIS_PASSWORD: string;
};
}
| {
volume: string;
image: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
COUCHDB_PASSWORD: string;
COUCHDB_USER: string;
};
} {
const {
id,
dbUser,
@@ -129,7 +208,6 @@ export function generateDatabaseConfiguration(database) {
const baseImage = getDatabaseImage(type);
if (type === 'mysql') {
return {
// url: `mysql://${dbUser}:${dbUserPassword}@${id}:${isPublic ? port : 3306}/${defaultDatabase}`,
privatePort: 3306,
environmentVariables: {
MYSQL_USER: dbUser,
@@ -142,9 +220,22 @@ export function generateDatabaseConfiguration(database) {
volume: `${id}-${type}-data:/bitnami/mysql/data`,
ulimits: {}
};
} else if (type === 'mariadb') {
return {
privatePort: 3306,
environmentVariables: {
MARIADB_ROOT_USER: rootUser,
MARIADB_ROOT_PASSWORD: rootUserPassword,
MARIADB_USER: dbUser,
MARIADB_PASSWORD: dbUserPassword,
MARIADB_DATABASE: defaultDatabase
},
image: `${baseImage}:${version}`,
volume: `${id}-${type}-data:/bitnami/mariadb`,
ulimits: {}
};
} else if (type === 'mongodb') {
return {
// url: `mongodb://${dbUser}:${dbUserPassword}@${id}:${isPublic ? port : 27017}/${defaultDatabase}`,
privatePort: 27017,
environmentVariables: {
MONGODB_ROOT_USER: rootUser,
@@ -156,7 +247,6 @@ export function generateDatabaseConfiguration(database) {
};
} else if (type === 'postgresql') {
return {
// url: `psql://${dbUser}:${dbUserPassword}@${id}:${isPublic ? port : 5432}/${defaultDatabase}`,
privatePort: 5432,
environmentVariables: {
POSTGRESQL_POSTGRES_PASSWORD: rootUserPassword,
@@ -170,7 +260,6 @@ export function generateDatabaseConfiguration(database) {
};
} else if (type === 'redis') {
return {
// url: `redis://${dbUser}:${dbUserPassword}@${id}:${isPublic ? port : 6379}/${defaultDatabase}`,
privatePort: 6379,
environmentVariables: {
REDIS_PASSWORD: dbUserPassword,
@@ -182,7 +271,6 @@ export function generateDatabaseConfiguration(database) {
};
} else if (type === 'couchdb') {
return {
// url: `couchdb://${dbUser}:${dbUserPassword}@${id}:${isPublic ? port : 5984}/${defaultDatabase}`,
privatePort: 5984,
environmentVariables: {
COUCHDB_PASSWORD: dbUserPassword,
@@ -193,18 +281,30 @@ export function generateDatabaseConfiguration(database) {
ulimits: {}
};
}
// } else if (type === 'clickhouse') {
// return {
// url: `clickhouse://${dbUser}:${dbUserPassword}@${id}:${port}/${defaultDatabase}`,
// privatePort: 9000,
// image: `bitnami/clickhouse-server:${version}`,
// volume: `${id}-${type}-data:/var/lib/clickhouse`,
// ulimits: {
// nofile: {
// soft: 262144,
// hard: 262144
// }
// }
// }
// }
}
export async function getFreePort() {
const data = await prisma.setting.findFirst();
const { minPort, maxPort } = data;
const dbUsed = await (
await prisma.database.findMany({
where: { publicPort: { not: null } },
select: { publicPort: true }
})
).map((a) => a.publicPort);
const wpFtpUsed = await (
await prisma.wordpress.findMany({
where: { ftpPublicPort: { not: null } },
select: { ftpPublicPort: true }
})
).map((a) => a.ftpPublicPort);
const wpUsed = await (
await prisma.wordpress.findMany({
where: { mysqlPublicPort: { not: null } },
select: { mysqlPublicPort: true }
})
).map((a) => a.mysqlPublicPort);
const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed];
return await getPort({ port: portNumbers(minPort, maxPort), exclude: usedPorts });
}
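
The new getFreePort above allocates public ports by gathering every port already handed out (database public ports plus WordPress FTP and MySQL ports) and letting get-port pick a free one inside the configured range. A standalone sketch of the same idea, with plain arrays standing in for the Prisma queries and made-up range values:

import getPort, { portNumbers } from 'get-port';

const minPort = 9000; // stand-ins for the range stored in the Setting table
const maxPort = 9100;

const dbUsed = [9001, 9002]; // database.publicPort values
const wpFtpUsed = [9010]; // wordpress.ftpPublicPort values
const wpUsed = [9011]; // wordpress.mysqlPublicPort values

const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed];
const publicPort = await getPort({ port: portNumbers(minPort, maxPort), exclude: usedPorts });
console.log(publicPort); // first free port in the range that is neither bound nor excluded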

View File

@@ -1,12 +1,11 @@
import { decrypt, encrypt } from '$lib/crypto';
import * as db from '$lib/database';
import cuid from 'cuid';
import { generatePassword } from '.';
import { prisma, ErrorHandler } from './common';
import getPort, { portNumbers } from 'get-port';
import { prisma } from './common';
import { asyncExecShell, getEngine, removeContainer } from '$lib/common';
import type { Database, DatabaseSettings, DestinationDocker } from '@prisma/client';
export async function listDatabases(teamId) {
export async function listDatabases(teamId: string): Promise<Database[]> {
if (teamId === '0') {
return await prisma.database.findMany({ include: { teams: true } });
} else {
@@ -16,7 +15,14 @@ export async function listDatabases(teamId) {
});
}
}
export async function newDatabase({ name, teamId }) {
export async function newDatabase({
name,
teamId
}: {
name: string;
teamId: string;
}): Promise<Database> {
const dbUser = cuid();
const dbUserPassword = encrypt(generatePassword());
const rootUser = cuid();
@@ -37,8 +43,14 @@ export async function newDatabase({ name, teamId }) {
});
}
export async function getDatabase({ id, teamId }) {
let body = {};
export async function getDatabase({
id,
teamId
}: {
id: string;
teamId: string;
}): Promise<Database & { destinationDocker: DestinationDocker; settings: DatabaseSettings }> {
let body;
if (teamId === '0') {
body = await prisma.database.findFirst({
where: { id },
@@ -50,20 +62,25 @@ export async function getDatabase({ id, teamId }) {
include: { destinationDocker: true, settings: true }
});
}
if (body.dbUserPassword) body.dbUserPassword = decrypt(body.dbUserPassword);
if (body.rootUserPassword) body.rootUserPassword = decrypt(body.rootUserPassword);
return { ...body };
return body;
}
export async function removeDatabase({ id }) {
export async function removeDatabase({ id }: { id: string }): Promise<void> {
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
return;
}
export async function configureDatabaseType({ id, type }) {
export async function configureDatabaseType({
id,
type
}: {
id: string;
type: string;
}): Promise<Database> {
return await prisma.database.update({
where: { id },
data: { type }
@@ -79,7 +96,7 @@ export async function setDatabase({
version?: string;
isPublic?: boolean;
appendOnly?: boolean;
}) {
}): Promise<Database> {
return await prisma.database.update({
where: { id },
data: {
@@ -97,7 +114,16 @@ export async function updateDatabase({
rootUser,
rootUserPassword,
version
}) {
}: {
id: string;
name: string;
defaultDatabase: string;
dbUser: string;
dbUserPassword: string;
rootUser: string;
rootUserPassword: string;
version: string;
}): Promise<Database> {
const encryptedDbUserPassword = dbUserPassword && encrypt(dbUserPassword);
const encryptedRootUserPassword = rootUserPassword && encrypt(rootUserPassword);
return await prisma.database.update({
@@ -114,7 +140,9 @@ export async function updateDatabase({
});
}
export async function stopDatabase(database) {
export async function stopDatabase(
database: Database & { destinationDocker: DestinationDocker }
): Promise<boolean> {
let everStarted = false;
const {
id,
@@ -156,6 +184,10 @@ export async function updatePasswordInDb(database, user, newPassword, isRoot) {
await asyncExecShell(
`DOCKER_HOST=${host} docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"`
);
} else if (type === 'mariadb') {
await asyncExecShell(
`DOCKER_HOST=${host} docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');\"`
);
} else if (type === 'postgresql') {
if (isRoot) {
await asyncExecShell(

View File

@@ -1,11 +1,22 @@
import { asyncExecShell, getEngine } from '$lib/common';
import { decrypt, encrypt } from '$lib/crypto';
import { dockerInstance } from '$lib/docker';
import { startCoolifyProxy } from '$lib/haproxy';
import { getDatabaseImage } from '.';
import { prisma } from './common';
import type { DestinationDocker, Service, Application, Prisma } from '@prisma/client';
import type { CreateDockerDestination } from '$lib/types/destinations';
export async function listDestinations(teamId) {
type DestinationConfigurationObject = {
id: string;
destinationId: string;
};
type FindDestinationFromTeam = {
id: string;
teamId: string;
};
export async function listDestinations(teamId: string): Promise<DestinationDocker[]> {
if (teamId === '0') {
return await prisma.destinationDocker.findMany({ include: { teams: true } });
}
@@ -15,19 +26,28 @@ export async function listDestinations(teamId) {
});
}
export async function configureDestinationForService({ id, destinationId }) {
export async function configureDestinationForService({
id,
destinationId
}: DestinationConfigurationObject): Promise<Service> {
return await prisma.service.update({
where: { id },
data: { destinationDocker: { connect: { id: destinationId } } }
});
}
export async function configureDestinationForApplication({ id, destinationId }) {
export async function configureDestinationForApplication({
id,
destinationId
}: DestinationConfigurationObject): Promise<Application> {
return await prisma.application.update({
where: { id },
data: { destinationDocker: { connect: { id: destinationId } } }
});
}
export async function configureDestinationForDatabase({ id, destinationId }) {
export async function configureDestinationForDatabase({
id,
destinationId
}: DestinationConfigurationObject): Promise<void> {
await prisma.database.update({
where: { id },
data: { destinationDocker: { connect: { id: destinationId } } }
@@ -48,7 +68,12 @@ export async function configureDestinationForDatabase({ id, destinationId }) {
}
}
}
export async function updateDestination({ id, name, engine, network }) {
export async function updateDestination({
id,
name,
engine,
network
}: Pick<DestinationDocker, 'id' | 'name' | 'engine' | 'network'>): Promise<DestinationDocker> {
return await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
}
@@ -58,13 +83,8 @@ export async function newRemoteDestination({
engine,
network,
isCoolifyProxyUsed,
remoteEngine,
ipAddress,
user,
port,
sshPrivateKey
}) {
const encryptedPrivateKey = encrypt(sshPrivateKey);
remoteEngine
}: CreateDockerDestination): Promise<string> {
const destination = await prisma.destinationDocker.create({
data: {
name,
@@ -72,16 +92,18 @@ export async function newRemoteDestination({
engine,
network,
isCoolifyProxyUsed,
remoteEngine,
ipAddress,
user,
port,
sshPrivateKey: encryptedPrivateKey
remoteEngine
}
});
return destination.id;
}
export async function newLocalDestination({ name, teamId, engine, network, isCoolifyProxyUsed }) {
export async function newLocalDestination({
name,
teamId,
engine,
network,
isCoolifyProxyUsed
}: CreateDockerDestination): Promise<string> {
const host = getEngine(engine);
const docker = dockerInstance({ destinationDocker: { engine, network } });
const found = await docker.engine.listNetworks({ filters: { name: [`^${network}$`] } });
@@ -99,18 +121,14 @@ export async function newLocalDestination({ name, teamId, engine, network, isCoo
(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
);
if (proxyConfigured) {
if (proxyConfigured.isCoolifyProxyUsed) {
isCoolifyProxyUsed = true;
} else {
isCoolifyProxyUsed = false;
}
isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
}
await prisma.destinationDocker.updateMany({ where: { engine }, data: { isCoolifyProxyUsed } });
}
if (isCoolifyProxyUsed) await startCoolifyProxy(engine);
return destination.id;
}
export async function removeDestination({ id }) {
export async function removeDestination({ id }: Pick<DestinationDocker, 'id'>): Promise<void> {
const destination = await prisma.destinationDocker.delete({ where: { id } });
if (destination.isCoolifyProxyUsed) {
const host = getEngine(destination.engine);
@@ -127,8 +145,11 @@ export async function removeDestination({ id }) {
}
}
export async function getDestination({ id, teamId }) {
let destination = {};
export async function getDestination({
id,
teamId
}: FindDestinationFromTeam): Promise<DestinationDocker & { sshPrivateKey?: string }> {
let destination;
if (teamId === '0') {
destination = await prisma.destinationDocker.findFirst({
where: { id }
@@ -141,13 +162,22 @@ export async function getDestination({ id, teamId }) {
return destination;
}
export async function getDestinationByApplicationId({ id, teamId }) {
export async function getDestinationByApplicationId({
id,
teamId
}: FindDestinationFromTeam): Promise<DestinationDocker> {
return await prisma.destinationDocker.findFirst({
where: { application: { some: { id } }, teams: { some: { id: teamId } } }
});
}
export async function setDestinationSettings({ engine, isCoolifyProxyUsed }) {
export async function setDestinationSettings({
engine,
isCoolifyProxyUsed
}: {
engine: string;
isCoolifyProxyUsed: boolean;
}): Promise<Prisma.BatchPayload> {
return await prisma.destinationDocker.updateMany({
where: { engine },
data: { isCoolifyProxyUsed }

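One detail worth calling out in newLocalDestination above: the Coolify proxy flag is kept consistent per Docker engine, so a new destination inherits whatever the other destinations on that engine already use. A small sketch of that reconciliation with plain objects standing in for the Prisma rows (the helper name is illustrative):

type Dest = { network: string; isCoolifyProxyUsed: boolean };

function reconcileProxyFlag(othersOnEngine: Dest[], network: string, requested: boolean): boolean {
	const proxyConfigured = othersOnEngine.find(
		(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
	);
	// Same simplification as the diff: !! replaces the old if/else branches.
	return proxyConfigured ? !!proxyConfigured.isCoolifyProxyUsed : requested;
}

console.log(reconcileProxyFlag([{ network: 'other', isCoolifyProxyUsed: true }], 'coolify', false)); // true
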
View File

@@ -1,7 +1,10 @@
import { decrypt, encrypt } from '$lib/crypto';
import { prisma } from './common';
import type { GithubApp, GitlabApp, GitSource, Prisma, Application } from '@prisma/client';
export async function listSources(teamId) {
export async function listSources(
teamId: string | Prisma.StringFilter
): Promise<(GitSource & { githubApp?: GithubApp; gitlabApp?: GitlabApp })[]> {
if (teamId === '0') {
return await prisma.gitSource.findMany({
include: { githubApp: true, gitlabApp: true, teams: true }
@@ -13,7 +16,13 @@ export async function listSources(teamId) {
});
}
export async function newSource({ teamId, name }) {
export async function newSource({
name,
teamId
}: {
name: string;
teamId: string;
}): Promise<GitSource> {
return await prisma.gitSource.create({
data: {
name,
@@ -21,7 +30,7 @@ export async function newSource({ teamId, name }) {
}
});
}
export async function removeSource({ id }) {
export async function removeSource({ id }: { id: string }): Promise<void> {
const source = await prisma.gitSource.delete({
where: { id },
include: { githubApp: true, gitlabApp: true }
@@ -30,8 +39,14 @@ export async function removeSource({ id }) {
if (source.gitlabAppId) await prisma.gitlabApp.delete({ where: { id: source.gitlabAppId } });
}
export async function getSource({ id, teamId }) {
let body = {};
export async function getSource({
id,
teamId
}: {
id: string;
teamId: string;
}): Promise<GitSource & { githubApp: GithubApp; gitlabApp: GitlabApp }> {
let body;
if (teamId === '0') {
body = await prisma.gitSource.findFirst({
where: { id },
@@ -51,8 +66,11 @@ export async function getSource({ id, teamId }) {
if (body?.gitlabApp?.appSecret) body.gitlabApp.appSecret = decrypt(body.gitlabApp.appSecret);
return body;
}
export async function addGitHubSource({ id, teamId, type, name, htmlUrl, apiUrl }) {
await prisma.gitSource.update({ where: { id }, data: { type, name, htmlUrl, apiUrl } });
export async function addGitHubSource({ id, teamId, type, name, htmlUrl, apiUrl, organization }) {
await prisma.gitSource.update({
where: { id },
data: { type, name, htmlUrl, apiUrl, organization }
});
return await prisma.githubApp.create({
data: {
teams: { connect: { id: teamId } },
@@ -72,7 +90,7 @@ export async function addGitLabSource({
appSecret,
groupName
}) {
const encrptedAppSecret = encrypt(appSecret);
const encryptedAppSecret = encrypt(appSecret);
await prisma.gitSource.update({ where: { id }, data: { type, apiUrl, htmlUrl, name } });
return await prisma.gitlabApp.create({
data: {
@@ -80,19 +98,35 @@ export async function addGitLabSource({
appId,
oauthId,
groupName,
appSecret: encrptedAppSecret,
appSecret: encryptedAppSecret,
gitSource: { connect: { id } }
}
});
}
export async function configureGitsource({ id, gitSourceId }) {
export async function configureGitsource({
id,
gitSourceId
}: {
id: string;
gitSourceId: string;
}): Promise<Application> {
return await prisma.application.update({
where: { id },
data: { gitSource: { connect: { id: gitSourceId } } }
});
}
export async function updateGitsource({ id, name, htmlUrl, apiUrl }) {
export async function updateGitsource({
id,
name,
htmlUrl,
apiUrl
}: {
id: string;
name: string;
htmlUrl: string;
apiUrl: string;
}): Promise<GitSource> {
return await prisma.gitSource.update({
where: { id },
data: { name, htmlUrl, apiUrl }

View File

@@ -1,7 +1,15 @@
import { decrypt, encrypt } from '$lib/crypto';
import { prisma } from './common';
import type { GithubApp } from '@prisma/client';
export async function addInstallation({ gitSourceId, installation_id }) {
// TODO: We should change installation_id to be camelCase
export async function addInstallation({
gitSourceId,
installation_id
}: {
gitSourceId: string;
installation_id: string;
}): Promise<GithubApp> {
const source = await prisma.gitSource.findUnique({
where: { id: gitSourceId },
include: { githubApp: true }
@@ -12,8 +20,12 @@ export async function addInstallation({ gitSourceId, installation_id }) {
});
}
export async function getUniqueGithubApp({ githubAppId }) {
let body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
export async function getUniqueGithubApp({
githubAppId
}: {
githubAppId: string;
}): Promise<GithubApp> {
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
if (body.privateKey) body.privateKey = decrypt(body.privateKey);
return body;
}
@@ -26,7 +38,15 @@ export async function createGithubApp({
pem,
webhook_secret,
state
}) {
}: {
id: number;
client_id: string;
slug: string;
client_secret: string;
pem: string;
webhook_secret: string;
state: string;
}): Promise<GithubApp> {
const encryptedClientSecret = encrypt(client_secret);
const encryptedWebhookSecret = encrypt(webhook_secret);
const encryptedPem = encrypt(pem);

View File

@@ -1,7 +1,14 @@
import { encrypt } from '$lib/crypto';
import { generateSshKeyPair, prisma } from './common';
import type { GitlabApp } from '@prisma/client';
export async function updateDeployKey({ id, deployKeyId }) {
export async function updateDeployKey({
id,
deployKeyId
}: {
id: string;
deployKeyId: number;
}): Promise<GitlabApp> {
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: { include: { gitlabApp: true } } }
@@ -11,14 +18,24 @@ export async function updateDeployKey({ id, deployKeyId }) {
data: { deployKeyId }
});
}
export async function getSshKey({ id }) {
export async function getSshKey({
id
}: {
id: string;
}): Promise<{ status: number; body: { publicKey: string } }> {
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: { include: { gitlabApp: true } } }
});
return { status: 200, body: { publicKey: application.gitSource.gitlabApp.publicSshKey } };
}
export async function generateSshKey({ id }) {
export async function generateSshKey({
id
}: {
id: string;
}): Promise<
{ status: number; body: { publicKey: string } } | { status: number; body?: undefined }
> {
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: { include: { gitlabApp: true } } }

View File

@@ -1,6 +1,13 @@
import type { BuildLog } from '@prisma/client';
import { prisma, ErrorHandler } from './common';
export async function listLogs({ buildId, last = 0 }) {
export async function listLogs({
buildId,
last = 0
}: {
buildId: string;
last: number;
}): Promise<BuildLog[] | { status: number; body: { message: string; error: string } }> {
try {
const body = await prisma.buildLog.findMany({
where: { buildId, time: { gt: last } },

View File
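The typed listLogs above takes a `last` cursor and only returns rows whose time is greater than it, which is what makes incremental log polling cheap. A hedged polling sketch; the import path, build id and interval are made up, and the BuildLog row is only assumed to carry the `time` field used in the query:

import { listLogs } from '$lib/database/logs'; // path assumed

let last = 0;
setInterval(async () => {
	const logs = await listLogs({ buildId: 'some-build-id', last });
	if (Array.isArray(logs) && logs.length > 0) {
		last = logs[logs.length - 1].time; // advance the cursor to the newest row seen
		for (const log of logs) console.log(log);
	}
}, 2000);
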

@@ -1,7 +1,8 @@
import { encrypt, decrypt } from '$lib/crypto';
import { prisma } from './common';
import type { ServiceSecret, Secret, Prisma } from '@prisma/client';
export async function listServiceSecrets(serviceId: string) {
export async function listServiceSecrets(serviceId: string): Promise<ServiceSecret[]> {
let secrets = await prisma.serviceSecret.findMany({
where: { serviceId },
orderBy: { createdAt: 'desc' }
@@ -14,7 +15,7 @@ export async function listServiceSecrets(serviceId: string) {
return secrets;
}
export async function listSecrets(applicationId: string) {
export async function listSecrets(applicationId: string): Promise<Secret[]> {
let secrets = await prisma.secret.findMany({
where: { applicationId },
orderBy: { createdAt: 'desc' }
@@ -27,20 +28,48 @@ export async function listSecrets(applicationId: string) {
return secrets;
}
export async function createServiceSecret({ id, name, value }) {
export async function createServiceSecret({
id,
name,
value
}: {
id: string;
name: string;
value: string;
}): Promise<ServiceSecret> {
value = encrypt(value);
return await prisma.serviceSecret.create({
data: { name, value, service: { connect: { id } } }
});
}
export async function createSecret({ id, name, value, isBuildSecret, isPRMRSecret }) {
export async function createSecret({
id,
name,
value,
isBuildSecret,
isPRMRSecret
}: {
id: string;
name: string;
value: string;
isBuildSecret: boolean;
isPRMRSecret: boolean;
}): Promise<Secret> {
value = encrypt(value);
return await prisma.secret.create({
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
});
}
export async function updateServiceSecret({ id, name, value }) {
export async function updateServiceSecret({
id,
name,
value
}: {
id: string;
name: string;
value: string;
}): Promise<Prisma.BatchPayload | ServiceSecret> {
value = encrypt(value);
const found = await prisma.serviceSecret.findFirst({ where: { serviceId: id, name } });
@@ -55,7 +84,19 @@ export async function updateServiceSecret({ id, name, value }) {
});
}
}
export async function updateSecret({ id, name, value, isBuildSecret, isPRMRSecret }) {
export async function updateSecret({
id,
name,
value,
isBuildSecret,
isPRMRSecret
}: {
id: string;
name: string;
value: string;
isBuildSecret: boolean;
isPRMRSecret: boolean;
}): Promise<Prisma.BatchPayload | Secret> {
value = encrypt(value);
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
@@ -71,10 +112,22 @@ export async function updateSecret({ id, name, value, isBuildSecret, isPRMRSecre
}
}
export async function removeServiceSecret({ id, name }) {
export async function removeServiceSecret({
id,
name
}: {
id: string;
name: string;
}): Promise<Prisma.BatchPayload> {
return await prisma.serviceSecret.deleteMany({ where: { serviceId: id, name } });
}
export async function removeSecret({ id, name }) {
export async function removeSecret({
id,
name
}: {
id: string;
name: string;
}): Promise<Prisma.BatchPayload> {
return await prisma.secret.deleteMany({ where: { applicationId: id, name } });
}
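
A usage sketch for the secret helpers typed above: values are run through encrypt() before they are written, so the database only stores ciphertext, and the list functions hand them back decrypted (per the decrypt import at the top of the file). The id, value and import path below are made up:

import { createSecret, listSecrets } from '$lib/database/secrets'; // path assumed

await createSecret({
	id: 'app_123',
	name: 'DATABASE_URL',
	value: 'postgres://user:pass@db:5432/app',
	isBuildSecret: false,
	isPRMRSecret: false
});

console.log(await listSecrets('app_123')); // values come back decrypted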

View File

@@ -1,10 +1,30 @@
import { asyncExecShell, getEngine } from '$lib/common';
import { decrypt, encrypt } from '$lib/crypto';
import type { Minio, Prisma, Service } from '@prisma/client';
import cuid from 'cuid';
import { generatePassword } from '.';
import { prisma } from './common';
export async function listServices(teamId) {
const include: Prisma.ServiceInclude = {
destinationDocker: true,
persistentStorage: true,
serviceSecret: true,
minio: true,
plausibleAnalytics: true,
vscodeserver: true,
wordpress: true,
ghost: true,
meiliSearch: true,
umami: true,
hasura: true,
fider: true
};
export async function listServicesWithIncludes() {
return await prisma.service.findMany({
include,
orderBy: { createdAt: 'desc' }
});
}
export async function listServices(teamId: string): Promise<Service[]> {
if (teamId === '0') {
return await prisma.service.findMany({ include: { teams: true } });
} else {
@@ -15,22 +35,18 @@ export async function listServices(teamId) {
}
}
export async function newService({ name, teamId }) {
export async function newService({
name,
teamId
}: {
name: string;
teamId: string;
}): Promise<Service> {
return await prisma.service.create({ data: { name, teams: { connect: { id: teamId } } } });
}
export async function getService({ id, teamId }) {
let body = {};
const include = {
destinationDocker: true,
plausibleAnalytics: true,
minio: true,
vscodeserver: true,
wordpress: true,
ghost: true,
serviceSecret: true,
meiliSearch: true
};
export async function getService({ id, teamId }: { id: string; teamId: string }): Promise<Service> {
let body;
if (teamId === '0') {
body = await prisma.service.findFirst({
where: { id },
@@ -43,6 +59,12 @@ export async function getService({ id, teamId }) {
});
}
if (body?.serviceSecret.length > 0) {
body.serviceSecret = body.serviceSecret.map((s) => {
s.value = decrypt(s.value);
return s;
});
}
if (body.plausibleAnalytics?.postgresqlPassword)
body.plausibleAnalytics.postgresqlPassword = decrypt(
body.plausibleAnalytics.postgresqlPassword
@@ -69,21 +91,37 @@ export async function getService({ id, teamId }) {
if (body.meiliSearch?.masterKey) body.meiliSearch.masterKey = decrypt(body.meiliSearch.masterKey);
if (body?.serviceSecret.length > 0) {
body.serviceSecret = body.serviceSecret.map((s) => {
s.value = decrypt(s.value);
return s;
});
}
if (body.wordpress?.ftpPassword) {
body.wordpress.ftpPassword = decrypt(body.wordpress.ftpPassword);
}
if (body.wordpress?.ftpPassword) body.wordpress.ftpPassword = decrypt(body.wordpress.ftpPassword);
if (body.umami?.postgresqlPassword)
body.umami.postgresqlPassword = decrypt(body.umami.postgresqlPassword);
if (body.umami?.umamiAdminPassword)
body.umami.umamiAdminPassword = decrypt(body.umami.umamiAdminPassword);
if (body.umami?.hashSalt) body.umami.hashSalt = decrypt(body.umami.hashSalt);
if (body.hasura?.postgresqlPassword)
body.hasura.postgresqlPassword = decrypt(body.hasura.postgresqlPassword);
if (body.hasura?.graphQLAdminPassword)
body.hasura.graphQLAdminPassword = decrypt(body.hasura.graphQLAdminPassword);
if (body.fider?.postgresqlPassword)
body.fider.postgresqlPassword = decrypt(body.fider.postgresqlPassword);
if (body.fider?.jwtSecret) body.fider.jwtSecret = decrypt(body.fider.jwtSecret);
if (body.fider?.emailSmtpPassword)
body.fider.emailSmtpPassword = decrypt(body.fider.emailSmtpPassword);
const settings = await prisma.setting.findFirst();
return { ...body, settings };
}
export async function configureServiceType({ id, type }) {
export async function configureServiceType({
id,
type
}: {
id: string;
type: string;
}): Promise<void> {
if (type === 'plausibleanalytics') {
const password = encrypt(generatePassword());
const postgresqlUser = cuid();
@@ -197,48 +235,236 @@ export async function configureServiceType({ id, type }) {
meiliSearch: { create: { masterKey } }
}
});
} else if (type === 'umami') {
const umamiAdminPassword = encrypt(generatePassword());
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword());
const postgresqlDatabase = 'umami';
const hashSalt = encrypt(generatePassword(64));
await prisma.service.update({
where: { id },
data: {
type,
umami: {
create: {
umamiAdminPassword,
postgresqlDatabase,
postgresqlPassword,
postgresqlUser,
hashSalt
}
}
}
});
} else if (type === 'hasura') {
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword());
const postgresqlDatabase = 'hasura';
const graphQLAdminPassword = encrypt(generatePassword());
await prisma.service.update({
where: { id },
data: {
type,
hasura: {
create: {
postgresqlDatabase,
postgresqlPassword,
postgresqlUser,
graphQLAdminPassword
}
}
}
});
} else if (type === 'fider') {
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword());
const postgresqlDatabase = 'fider';
const jwtSecret = encrypt(generatePassword(64, true));
await prisma.service.update({
where: { id },
data: {
type,
fider: {
create: {
postgresqlDatabase,
postgresqlPassword,
postgresqlUser,
jwtSecret
}
}
}
});
}
}
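
A hedged call sketch for the branch added above (ids and names are invented; newService and configureServiceType are assumed to be re-exported from $lib/database like the other helpers):

// Sketch only: creating a service and letting configureServiceType generate
// and encrypt the type-specific credentials (here: the new 'fider' branch).
import { newService, configureServiceType } from '$lib/database';

async function createFiderService(teamId: string): Promise<string> {
	const service = await newService({ name: 'feedback', teamId });
	await configureServiceType({ id: service.id, type: 'fider' });
	return service.id;
}
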
export async function setServiceVersion({ id, version }) {
export async function setServiceVersion({
id,
version
}: {
id: string;
version: string;
}): Promise<Service> {
return await prisma.service.update({
where: { id },
data: { version }
});
}
export async function setServiceSettings({ id, dualCerts }) {
export async function setServiceSettings({
id,
dualCerts
}: {
id: string;
dualCerts: boolean;
}): Promise<Service> {
return await prisma.service.update({
where: { id },
data: { dualCerts }
});
}
export async function updatePlausibleAnalyticsService({ id, fqdn, email, username, name }) {
export async function updatePlausibleAnalyticsService({
id,
fqdn,
email,
exposePort,
username,
name
}: {
id: string;
fqdn: string;
exposePort?: number;
name: string;
email: string;
username: string;
}): Promise<void> {
await prisma.plausibleAnalytics.update({ where: { serviceId: id }, data: { email, username } });
await prisma.service.update({ where: { id }, data: { name, fqdn } });
await prisma.service.update({ where: { id }, data: { name, fqdn, exposePort } });
}
export async function updateService({ id, fqdn, name }) {
return await prisma.service.update({ where: { id }, data: { fqdn, name } });
export async function updateService({
id,
fqdn,
exposePort,
name
}: {
id: string;
fqdn: string;
exposePort?: number;
name: string;
}): Promise<Service> {
return await prisma.service.update({ where: { id }, data: { fqdn, name, exposePort } });
}
export async function updateWordpress({ id, fqdn, name, mysqlDatabase, extraConfig }) {
export async function updateFiderService({
id,
fqdn,
name,
exposePort,
emailNoreply,
emailMailgunApiKey,
emailMailgunDomain,
emailMailgunRegion,
emailSmtpHost,
emailSmtpPort,
emailSmtpUser,
emailSmtpPassword,
emailSmtpEnableStartTls
}: {
id: string;
fqdn: string;
exposePort?: number;
name: string;
emailNoreply: string;
emailMailgunApiKey: string;
emailMailgunDomain: string;
emailMailgunRegion: string;
emailSmtpHost: string;
emailSmtpPort: number;
emailSmtpUser: string;
emailSmtpPassword: string;
emailSmtpEnableStartTls: boolean;
}): Promise<Service> {
return await prisma.service.update({
where: { id },
data: { fqdn, name, wordpress: { update: { mysqlDatabase, extraConfig } } }
});
}
export async function updateMinioService({ id, publicPort }) {
return await prisma.minio.update({ where: { serviceId: id }, data: { publicPort } });
}
export async function updateGhostService({ id, fqdn, name, mariadbDatabase }) {
return await prisma.service.update({
where: { id },
data: { fqdn, name, ghost: { update: { mariadbDatabase } } }
data: {
fqdn,
name,
exposePort,
fider: {
update: {
emailNoreply,
emailMailgunApiKey,
emailMailgunDomain,
emailMailgunRegion,
emailSmtpHost,
emailSmtpPort,
emailSmtpUser,
emailSmtpPassword,
emailSmtpEnableStartTls
}
}
}
});
}
export async function removeService({ id }) {
export async function updateWordpress({
id,
fqdn,
name,
exposePort,
mysqlDatabase,
extraConfig
}: {
id: string;
fqdn: string;
name: string;
exposePort?: number;
mysqlDatabase: string;
extraConfig: string;
}): Promise<Service> {
return await prisma.service.update({
where: { id },
data: { fqdn, name, exposePort, wordpress: { update: { mysqlDatabase, extraConfig } } }
});
}
export async function updateMinioService({
id,
publicPort
}: {
id: string;
publicPort: number;
}): Promise<Minio> {
return await prisma.minio.update({ where: { serviceId: id }, data: { publicPort } });
}
export async function updateGhostService({
id,
fqdn,
name,
exposePort,
mariadbDatabase
}: {
id: string;
fqdn: string;
name: string;
exposePort?: number;
mariadbDatabase: string;
}): Promise<Service> {
return await prisma.service.update({
where: { id },
data: { fqdn, name, exposePort, ghost: { update: { mariadbDatabase } } }
});
}
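
All of the update helpers above now take an optional exposePort that is persisted on the Service record next to fqdn and name. A small caller sketch (values invented; omitting exposePort leaves the stored value untouched because Prisma ignores undefined fields):

// Sketch only: saving a domain together with an exposed host port.
import { updateService } from '$lib/database';

async function saveGeneralSettings(serviceId: string): Promise<void> {
	await updateService({
		id: serviceId,
		fqdn: 'https://status.example.com', // invented domain
		name: 'status-page',
		exposePort: 8080 // optional; drop it to keep the stored value
	});
}
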
export async function removeService({ id }: { id: string }): Promise<void> {
await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
await prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
await prisma.fider.deleteMany({ where: { serviceId: id } });
await prisma.ghost.deleteMany({ where: { serviceId: id } });
await prisma.umami.deleteMany({ where: { serviceId: id } });
await prisma.hasura.deleteMany({ where: { serviceId: id } });
await prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
await prisma.minio.deleteMany({ where: { serviceId: id } });
await prisma.vscodeserver.deleteMany({ where: { serviceId: id } });


@@ -1,8 +1,9 @@
import { decrypt } from '$lib/crypto';
import { prisma } from './common';
import type { Setting } from '@prisma/client';
export async function listSettings() {
let settings = await prisma.setting.findFirst({});
export async function listSettings(): Promise<Setting> {
const settings = await prisma.setting.findFirst({});
if (settings.proxyPassword) settings.proxyPassword = decrypt(settings.proxyPassword);
return settings;
}


@@ -1,9 +1,10 @@
import type { Team, Permission } from '@prisma/client';
import { prisma } from './common';
export async function listTeams() {
export async function listTeams(): Promise<Team[]> {
return await prisma.team.findMany();
}
export async function newTeam({ name, userId }) {
export async function newTeam({ name, userId }: { name: string; userId: string }): Promise<Team> {
return await prisma.team.create({
data: {
name,
@@ -12,7 +13,11 @@ export async function newTeam({ name, userId }) {
}
});
}
export async function getMyTeams({ userId }) {
export async function getMyTeams({
userId
}: {
userId: string;
}): Promise<(Permission & { team: Team & { _count: { users: number } } })[]> {
return await prisma.permission.findMany({
where: { userId },
include: { team: { include: { _count: { select: { users: true } } } } }
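
The widened return type exposes the nested _count without casts; a small consumer sketch (assuming getMyTeams is re-exported from $lib/database):

// Sketch only: listing a user's teams with their member counts.
import { getMyTeams } from '$lib/database';

async function describeTeams(userId: string): Promise<string[]> {
	const permissions = await getMyTeams({ userId });
	return permissions.map(({ team }) => `${team.name}: ${team._count.users} member(s)`);
}
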


@@ -1,16 +1,30 @@
import cuid from 'cuid';
import bcrypt from 'bcrypt';
import bcrypt from 'bcryptjs';
import { prisma } from './common';
import { asyncExecShell, uniqueName } from '$lib/common';
import * as db from '$lib/database';
import { startCoolifyProxy } from '$lib/haproxy';
export async function hashPassword(password: string) {
import type { User } from '@prisma/client';
export async function hashPassword(password: string): Promise<string> {
const saltRounds = 15;
return bcrypt.hash(password, saltRounds);
}
export async function login({ email, password, isLogin }) {
export async function login({
email,
password,
isLogin
}: {
email: string;
password: string;
isLogin: boolean;
}): Promise<{
status: number;
headers: { 'Set-Cookie': string };
body: { userId: string; teamId: string; permission: string; isAdmin: boolean };
}> {
const users = await prisma.user.count();
const userFound = await prisma.user.findUnique({
where: { email },
@@ -32,8 +46,12 @@ export async function login({ email, password, isLogin }) {
if (users === 0) {
await prisma.setting.update({ where: { id }, data: { isRegistrationEnabled: false } });
// Create default network & start Coolify Proxy
await asyncExecShell(`docker network create --attachable coolify`);
await startCoolifyProxy('/var/run/docker.sock');
try {
await asyncExecShell(`docker network create --attachable coolify`);
} catch (error) {}
try {
await startCoolifyProxy('/var/run/docker.sock');
} catch (error) {}
uid = '0';
}
@@ -140,6 +158,6 @@ export async function login({ email, password, isLogin }) {
};
}
export async function getUser({ userId }) {
export async function getUser({ userId }: { userId: string }): Promise<User> {
return await prisma.user.findUnique({ where: { id: userId } });
}
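
The move from bcrypt to bcryptjs keeps the same API, so stored hashes remain verifiable; a hedged sketch of the verification side (helper name invented):

// Sketch only: checking a login password against the hash produced by
// hashPassword above (bcryptjs embeds the salt in the stored hash).
import bcrypt from 'bcryptjs';

export async function verifyPassword(plain: string, storedHash: string): Promise<boolean> {
	return bcrypt.compare(plain, storedHash);
}
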


@@ -3,6 +3,34 @@ import { promises as fs } from 'fs';
import { checkPnpm } from './buildPacks/common';
import { saveBuildLog } from './common';
export async function buildCacheImageForLaravel(data, imageForBuild) {
const { applicationId, tag, workdir, docker, buildId, debug, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
});
}
Dockerfile.push(`COPY *.json *.mix.js /app/`);
Dockerfile.push(`COPY resources /app/resources`);
Dockerfile.push(`RUN yarn install && yarn production`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}
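
For orientation, a self-contained sketch of what buildCacheImageForLaravel writes (the base image and build id are assumed values, and the build-secret ARG lines are omitted):

// Sketch only: the Dockerfile-cache content for a Laravel app, assembled the
// same way as above but with illustrative inputs and no secret handling.
const imageForBuild = 'php:8.1-cli'; // assumed value
const buildId = 'example-build-id'; // assumed value
const dockerfile = [
	`FROM ${imageForBuild}`,
	'WORKDIR /app',
	`LABEL coolify.buildId=${buildId}`,
	'COPY *.json *.mix.js /app/',
	'COPY resources /app/resources',
	'RUN yarn install && yarn production'
].join('\n');
console.log(dockerfile);
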
export async function buildCacheImageWithNode(data, imageForBuild) {
const {
applicationId,
@@ -21,7 +49,7 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.image=true`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
@@ -41,10 +69,11 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
Dockerfile.push('RUN pnpm add -g pnpm');
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
if (installCommand) {
Dockerfile.push(`COPY .${baseDirectory || ''}/package.json ./`);
Dockerfile.push(`RUN ${installCommand}`);
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${buildCommand}`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
@@ -65,11 +94,13 @@ export async function buildCacheImageWithCargo(data, imageForBuild) {
} = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push('RUN cargo install cargo-chef');
Dockerfile.push('COPY . .');
Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json');
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push('RUN cargo install cargo-chef');
Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
@@ -85,7 +116,8 @@ export async function buildImage({
docker,
buildId,
isCache = false,
debug = false
debug = false,
dockerFileLocation = '/Dockerfile'
}) {
if (isCache) {
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
@@ -103,11 +135,12 @@ export async function buildImage({
const stream = await docker.engine.buildImage(
{ src: ['.'], context: workdir },
{
dockerfile: isCache ? 'Dockerfile-cache' : 'Dockerfile',
dockerfile: isCache ? `${dockerFileLocation}-cache` : dockerFileLocation,
t: `${applicationId}:${tag}${isCache ? '-cache' : ''}`
}
);
await streamEvents({ stream, docker, buildId, applicationId, debug });
await saveBuildLog({ line: `Building image successful!`, buildId, applicationId });
}
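
The new dockerFileLocation parameter defaults to '/Dockerfile', so existing callers are unaffected while build packs with a custom Dockerfile path can pass their own; a tiny sketch of how the name is resolved for cache builds (example path invented):

// Sketch only: dockerfile name resolution as used in buildImage above.
function resolveDockerfile(isCache: boolean, dockerFileLocation = '/Dockerfile'): string {
	return isCache ? `${dockerFileLocation}-cache` : dockerFileLocation;
}

resolveDockerfile(false); // '/Dockerfile'
resolveDockerfile(true); // '/Dockerfile-cache'
resolveDockerfile(true, '/docker/Dockerfile.custom'); // '/docker/Dockerfile.custom-cache'
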
export function dockerInstance({ destinationDocker }): { engine: Dockerode; network: string } {


@@ -1,5 +1,6 @@
import { toast } from '@zerodevx/svelte-toast';
export function errorNotification(message: string) {
export function errorNotification(message: string): void {
console.error(message);
if (typeof message !== 'string') {
toast.push('Ooops, something is not okay, are you okay?');
@@ -30,7 +31,7 @@ export function enhance(
e.preventDefault();
let body = new FormData(form);
let parsedData = body;
const parsedData = body;
body.forEach((data, key) => {
if (data === '' || data === null) parsedData.delete(key);


@@ -1,16 +1,16 @@
import { dev } from '$app/env';
import got from 'got';
import got, { type Got } from 'got';
import * as db from '$lib/database';
import mustache from 'mustache';
import crypto from 'crypto';
import * as db from '$lib/database';
import { checkContainer, checkHAProxy } from '.';
import { asyncExecShell, getDomain, getEngine } from '$lib/common';
import { supportedServiceTypesAndVersions } from '$lib/components/common';
import { listServicesWithIncludes } from '$lib/database';
const url = dev ? 'http://localhost:5555' : 'http://coolify-haproxy:5555';
let template = `program api
const template = `program api
command /usr/bin/dataplaneapi -f /usr/local/etc/haproxy/dataplaneapi.hcl --userlist haproxy-dataplaneapi
no option start-on-reload
@@ -21,10 +21,10 @@ global
defaults
mode http
log global
timeout http-request 60s
timeout http-request 120s
timeout connect 10s
timeout client 60s
timeout server 60s
timeout client 120s
timeout server 120s
userlist haproxy-dataplaneapi
user admin insecure-password "\${HAPROXY_PASSWORD}"
@@ -95,6 +95,8 @@ backend {{domain}}
{{/isHttps}}
http-request add-header X-Forwarded-Host %[req.hdr(host),lower]
server {{id}} {{id}}:{{port}}
compression algo gzip
compression type text/html text/css text/plain text/xml text/x-component text/javascript application/x-javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype
{{/isRunning}}
{{/applications}}
@@ -111,6 +113,8 @@ backend {{domain}}
{{/isHttps}}
http-request add-header X-Forwarded-Host %[req.hdr(host),lower]
server {{id}} {{id}}:{{port}}
compression algo gzip
compression type text/html text/css text/plain text/xml text/x-component text/javascript application/x-javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype
{{/isRunning}}
{{/services}}
@@ -126,9 +130,12 @@ backend {{domain}}
{{/isHttps}}
http-request add-header X-Forwarded-Host %[req.hdr(host),lower]
server {{id}} {{id}}:{{port}} check fall 10
compression algo gzip
compression type text/html text/css text/plain text/xml text/x-component text/javascript application/x-javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype
{{/coolify}}
`;
export async function haproxyInstance() {
export async function haproxyInstance(): Promise<Got> {
const { proxyPassword } = await db.listSettings();
return got.extend({
prefixUrl: url,
@@ -137,31 +144,87 @@ export async function haproxyInstance() {
});
}
export async function configureHAProxy() {
export async function configureHAProxy(): Promise<void> {
const haproxy = await haproxyInstance();
await checkHAProxy(haproxy);
try {
const data = {
applications: [],
services: [],
coolify: []
};
const applications = await db.prisma.application.findMany({
include: { destinationDocker: true, settings: true }
});
for (const application of applications) {
const {
fqdn,
id,
port,
destinationDocker,
destinationDockerId,
settings: { previews },
updatedAt
} = application;
if (destinationDockerId) {
const { engine, network } = destinationDocker;
const data = {
applications: [],
services: [],
coolify: []
};
const applications = await db.prisma.application.findMany({
include: { destinationDocker: true, settings: true }
});
for (const application of applications) {
const {
fqdn,
id,
port,
destinationDocker,
destinationDockerId,
settings: { previews },
updatedAt
} = application;
if (destinationDockerId) {
const { engine, network } = destinationDocker;
const isRunning = await checkContainer(engine, id);
if (fqdn) {
const domain = getDomain(fqdn);
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const redirectValue = `${isHttps ? 'https://' : 'http://'}${domain}%[capture.req.uri]`;
if (isRunning) {
data.applications.push({
id,
port: port || 3000,
domain,
isRunning,
isHttps,
redirectValue,
redirectTo: isWWW ? domain.replace('www.', '') : 'www.' + domain,
updatedAt: updatedAt.getTime()
});
}
if (previews) {
const host = getEngine(engine);
const { stdout } = await asyncExecShell(
`DOCKER_HOST=${host} docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"`
);
const containers = stdout
.trim()
.split('\n')
.filter((a) => a)
.map((c) => c.replace(/"/g, ''));
if (containers.length > 0) {
for (const container of containers) {
const previewDomain = `${container.split('-')[1]}.${domain}`;
data.applications.push({
id: container,
port: port || 3000,
domain: previewDomain,
isRunning,
isHttps,
redirectValue,
redirectTo: isWWW ? previewDomain.replace('www.', '') : 'www.' + previewDomain,
updatedAt: updatedAt.getTime()
});
}
}
}
}
}
}
const services = await listServicesWithIncludes();
for (const service of services) {
const { fqdn, id, type, destinationDocker, destinationDockerId, updatedAt } = service;
if (destinationDockerId) {
const { engine } = destinationDocker;
const found = supportedServiceTypesAndVersions.find((a) => a.name === type);
if (found) {
const port = found.ports.main;
const publicPort = service[type]?.publicPort;
const isRunning = await checkContainer(engine, id);
if (fqdn) {
const domain = getDomain(fqdn);
@@ -169,9 +232,10 @@ export async function configureHAProxy() {
const isWWW = fqdn.includes('www.');
const redirectValue = `${isHttps ? 'https://' : 'http://'}${domain}%[capture.req.uri]`;
if (isRunning) {
data.applications.push({
data.services.push({
id,
port: port || 3000,
port,
publicPort,
domain,
isRunning,
isHttps,
@@ -180,108 +244,38 @@ export async function configureHAProxy() {
updatedAt: updatedAt.getTime()
});
}
if (previews) {
const host = getEngine(engine);
const { stdout } = await asyncExecShell(
`DOCKER_HOST=${host} docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"`
);
const containers = stdout
.trim()
.split('\n')
.filter((a) => a)
.map((c) => c.replace(/"/g, ''));
if (containers.length > 0) {
for (const container of containers) {
let previewDomain = `${container.split('-')[1]}.${domain}`;
data.applications.push({
id: container,
port: port || 3000,
domain: previewDomain,
isRunning,
isHttps,
redirectValue,
redirectTo: isWWW ? previewDomain.replace('www.', '') : 'www.' + previewDomain,
updatedAt: updatedAt.getTime()
});
}
}
}
}
}
}
const services = await db.prisma.service.findMany({
include: {
destinationDocker: true,
minio: true,
plausibleAnalytics: true,
vscodeserver: true,
wordpress: true,
ghost: true
}
const { fqdn } = await db.prisma.setting.findFirst();
if (fqdn) {
const domain = getDomain(fqdn);
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const redirectValue = `${isHttps ? 'https://' : 'http://'}${domain}%[capture.req.uri]`;
data.coolify.push({
id: dev ? 'host.docker.internal' : 'coolify',
port: 3000,
domain,
isHttps,
redirectValue,
redirectTo: isWWW ? domain.replace('www.', '') : 'www.' + domain
});
}
const output = mustache.render(template, data);
const newHash = crypto.createHash('md5').update(output).digest('hex');
const { proxyHash, id } = await db.listSettings();
if (proxyHash !== newHash) {
await db.prisma.setting.update({ where: { id }, data: { proxyHash: newHash } });
await haproxy.post(`v2/services/haproxy/configuration/raw`, {
searchParams: {
skip_version: true
},
body: output,
headers: {
'Content-Type': 'text/plain'
}
});
for (const service of services) {
const { fqdn, id, type, destinationDocker, destinationDockerId, updatedAt } = service;
if (destinationDockerId) {
const { engine } = destinationDocker;
const found = supportedServiceTypesAndVersions.find((a) => a.name === type);
if (found) {
const port = found.ports.main;
const publicPort = service[type]?.publicPort;
const isRunning = await checkContainer(engine, id);
if (fqdn) {
const domain = getDomain(fqdn);
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const redirectValue = `${isHttps ? 'https://' : 'http://'}${domain}%[capture.req.uri]`;
if (isRunning) {
data.services.push({
id,
port,
publicPort,
domain,
isRunning,
isHttps,
redirectValue,
redirectTo: isWWW ? domain.replace('www.', '') : 'www.' + domain,
updatedAt: updatedAt.getTime()
});
}
}
}
}
}
const { fqdn } = await db.prisma.setting.findFirst();
if (fqdn) {
const domain = getDomain(fqdn);
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const redirectValue = `${isHttps ? 'https://' : 'http://'}${domain}%[capture.req.uri]`;
data.coolify.push({
id: dev ? 'host.docker.internal' : 'coolify',
port: 3000,
domain,
isHttps,
redirectValue,
redirectTo: isWWW ? domain.replace('www.', '') : 'www.' + domain
});
}
const output = mustache.render(template, data);
const newHash = crypto.createHash('md5').update(output).digest('hex');
const { proxyHash, id } = await db.listSettings();
if (proxyHash !== newHash) {
await db.prisma.setting.update({ where: { id }, data: { proxyHash: newHash } });
await haproxy.post(`v2/services/haproxy/configuration/raw`, {
searchParams: {
skip_version: true
},
body: output,
headers: {
'Content-Type': 'text/plain'
}
});
}
} catch (error) {
throw error;
}
}
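
The tail of configureHAProxy renders the mustache template with the collected entries and only pushes a new raw configuration when its MD5 differs from the stored proxyHash; a condensed, self-contained sketch with invented data:

// Sketch only: stand-in template; the real one is the `template` string above.
import mustache from 'mustache';
import crypto from 'crypto';

const template = '{{#applications}}backend {{domain}}\n  server {{id}} {{id}}:{{port}}\n{{/applications}}';
const data = {
	applications: [
		{ id: 'clapp123', port: 3000, domain: 'app.example.com' } // invented entry
	],
	services: [],
	coolify: []
};
const output = mustache.render(template, data);
const newHash = crypto.createHash('md5').update(output).digest('hex');
// configureHAProxy compares newHash with the stored proxyHash and only POSTs
// to v2/services/haproxy/configuration/raw when they differ.
console.log(newHash);
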


@@ -1,7 +1,8 @@
import { dev } from '$app/env';
import { asyncExecShell, getEngine } from '$lib/common';
import got from 'got';
import got, { type Got, type Response } from 'got';
import * as db from '$lib/database';
import type { DestinationDocker } from '@prisma/client';
const url = dev ? 'http://localhost:5555' : 'http://coolify-haproxy:5555';
@@ -9,7 +10,7 @@ export const defaultProxyImage = `coolify-haproxy-alpine:latest`;
export const defaultProxyImageTcp = `coolify-haproxy-tcp-alpine:latest`;
export const defaultProxyImageHttp = `coolify-haproxy-http-alpine:latest`;
export async function haproxyInstance() {
export async function haproxyInstance(): Promise<Got> {
const { proxyPassword } = await db.listSettings();
return got.extend({
prefixUrl: url,
@@ -17,6 +18,7 @@ export async function haproxyInstance() {
password: proxyPassword
});
}
export async function getRawConfiguration(): Promise<RawHaproxyConfiguration> {
return await (await haproxyInstance()).get(`v2/services/haproxy/configuration/raw`).json();
}
@@ -43,11 +45,12 @@ export async function getNextTransactionId(): Promise<string> {
return newTransaction.id;
}
export async function completeTransaction(transactionId) {
export async function completeTransaction(transactionId: string): Promise<Response<string>> {
const haproxy = await haproxyInstance();
return await haproxy.put(`v2/services/haproxy/transactions/${transactionId}`);
}
export async function deleteProxy({ id }) {
export async function deleteProxy({ id }: { id: string }): Promise<void> {
const haproxy = await haproxyInstance();
await checkHAProxy(haproxy);
@@ -77,11 +80,12 @@ export async function deleteProxy({ id }) {
}
}
export async function reloadHaproxy(engine) {
export async function reloadHaproxy(engine: string): Promise<{ stdout: string; stderr: string }> {
const host = getEngine(engine);
return await asyncExecShell(`DOCKER_HOST=${host} docker exec coolify-haproxy kill -HUP 1`);
}
export async function checkHAProxy(haproxy?: any) {
export async function checkHAProxy(haproxy?: Got): Promise<void> {
if (!haproxy) haproxy = await haproxyInstance();
try {
await haproxy.get('v2/info');
@@ -93,7 +97,10 @@ export async function checkHAProxy(haproxy?: any) {
}
}
export async function stopTcpHttpProxy(destinationDocker, publicPort) {
export async function stopTcpHttpProxy(
destinationDocker: DestinationDocker,
publicPort: number
): Promise<{ stdout: string; stderr: string } | Error> {
const { engine } = destinationDocker;
const host = getEngine(engine);
const containerName = `haproxy-for-${publicPort}`;
@@ -108,16 +115,22 @@ export async function stopTcpHttpProxy(destinationDocker, publicPort) {
return error;
}
}
export async function startTcpProxy(destinationDocker, id, publicPort, privatePort, volume = null) {
export async function startTcpProxy(
destinationDocker: DestinationDocker,
id: string,
publicPort: number,
privatePort: number,
volume?: string
): Promise<{ stdout: string; stderr: string } | Error> {
const { network, engine } = destinationDocker;
const host = getEngine(engine);
const containerName = `haproxy-for-${publicPort}`;
const found = await checkContainer(engine, containerName);
const foundDB = await checkContainer(engine, id);
const found = await checkContainer(engine, containerName, true);
const foundDependentContainer = await checkContainer(engine, id, true);
try {
if (foundDB && !found) {
if (foundDependentContainer && !found) {
const { stdout: Config } = await asyncExecShell(
`DOCKER_HOST="${host}" docker network inspect bridge --format '{{json .IPAM.Config }}'`
);
@@ -128,20 +141,31 @@ export async function startTcpProxy(destinationDocker, id, publicPort, privatePo
} -d coollabsio/${defaultProxyImageTcp}`
);
}
if (!foundDependentContainer && found) {
return await asyncExecShell(
`DOCKER_HOST=${host} docker stop -t 0 ${containerName} && docker rm ${containerName}`
);
}
} catch (error) {
return error;
}
}
export async function startHttpProxy(destinationDocker, id, publicPort, privatePort) {
export async function startHttpProxy(
destinationDocker: DestinationDocker,
id: string,
publicPort: number,
privatePort: number
): Promise<{ stdout: string; stderr: string } | Error> {
const { network, engine } = destinationDocker;
const host = getEngine(engine);
const containerName = `haproxy-for-${publicPort}`;
const found = await checkContainer(engine, containerName);
const foundDB = await checkContainer(engine, id);
const found = await checkContainer(engine, containerName, true);
const foundDependentContainer = await checkContainer(engine, id, true);
try {
if (foundDB && !found) {
if (foundDependentContainer && !found) {
const { stdout: Config } = await asyncExecShell(
`DOCKER_HOST="${host}" docker network inspect bridge --format '{{json .IPAM.Config }}'`
);
@@ -150,13 +174,19 @@ export async function startHttpProxy(destinationDocker, id, publicPort, privateP
`DOCKER_HOST=${host} docker run --restart always -e PORT=${publicPort} -e APP=${id} -e PRIVATE_PORT=${privatePort} --add-host 'host.docker.internal:host-gateway' --add-host 'host.docker.internal:${ip}' --network ${network} -p ${publicPort}:${publicPort} --name ${containerName} -d coollabsio/${defaultProxyImageHttp}`
);
}
if (!foundDependentContainer && found) {
return await asyncExecShell(
`DOCKER_HOST=${host} docker stop -t 0 ${containerName} && docker rm ${containerName}`
);
}
} catch (error) {
return error;
}
}
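
Renaming foundDB to foundDependentContainer makes the lifecycle in startTcpProxy/startHttpProxy explicit: start a haproxy-for-<publicPort> container when the dependent container exists but the proxy does not, and tear the proxy down when the dependent container is gone. A compact sketch of that decision:

// Sketch only: the proxy lifecycle decision used by both helpers above.
type ProxyAction = 'start-proxy' | 'remove-proxy' | 'nothing';

function decideProxyAction(foundProxy: boolean, foundDependentContainer: boolean): ProxyAction {
	if (foundDependentContainer && !foundProxy) return 'start-proxy'; // docker run ... haproxy-for-<port>
	if (!foundDependentContainer && foundProxy) return 'remove-proxy'; // docker stop && docker rm
	return 'nothing';
}
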
export async function startCoolifyProxy(engine) {
export async function startCoolifyProxy(engine: string): Promise<void> {
const host = getEngine(engine);
const found = await checkContainer(engine, 'coolify-haproxy');
const found = await checkContainer(engine, 'coolify-haproxy', true);
const { proxyPassword, proxyUser, id } = await db.listSettings();
if (!found) {
const { stdout: Config } = await asyncExecShell(
@@ -170,7 +200,26 @@ export async function startCoolifyProxy(engine) {
}
await configureNetworkCoolifyProxy(engine);
}
export async function checkContainer(engine, container) {
export async function isContainerExited(engine: string, containerName: string): Promise<boolean> {
let isExited = false;
const host = getEngine(engine);
try {
const { stdout } = await asyncExecShell(
`DOCKER_HOST="${host}" docker inspect -f '{{.State.Status}}' ${containerName}`
);
if (stdout.trim() === 'exited') {
isExited = true;
}
} catch (error) {}
return isExited;
}
export async function checkContainer(
engine: string,
container: string,
remove: boolean = false
): Promise<boolean> {
const host = getEngine(engine);
let containerFound = false;
@@ -180,8 +229,11 @@ export async function checkContainer(engine, container) {
);
const parsedStdout = JSON.parse(stdout);
const status = parsedStdout.Status;
const isRunning = status === 'running' ? true : false;
if (status === 'exited' || status === 'created') {
const isRunning = status === 'running';
if (status === 'created') {
await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
}
if (remove && status === 'exited') {
await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
}
if (isRunning) {
@@ -193,7 +245,9 @@ export async function checkContainer(engine, container) {
return containerFound;
}
export async function stopCoolifyProxy(engine) {
export async function stopCoolifyProxy(
engine: string
): Promise<{ stdout: string; stderr: string } | Error> {
const host = getEngine(engine);
const found = await checkContainer(engine, 'coolify-haproxy');
await db.setDestinationSettings({ engine, isCoolifyProxyUsed: false });
@@ -210,16 +264,18 @@ export async function stopCoolifyProxy(engine) {
}
}
export async function configureNetworkCoolifyProxy(engine) {
export async function configureNetworkCoolifyProxy(engine: string): Promise<void> {
const host = getEngine(engine);
const destinations = await db.prisma.destinationDocker.findMany({ where: { engine } });
destinations.forEach(async (destination) => {
try {
const { stdout: networks } = await asyncExecShell(
`DOCKER_HOST="${host}" docker ps -a --filter name=coolify-haproxy --format '{{json .Networks}}'`
);
const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
for (const destination of destinations) {
if (!configuredNetworks.includes(destination.network)) {
await asyncExecShell(
`DOCKER_HOST="${host}" docker network connect ${destination.network} coolify-haproxy`
);
} catch (err) {
// TODO: handle error
}
});
}
}
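
Because the removed forEach body and the added for...of body are interleaved above, here is a hedged reconstruction of what the new configureNetworkCoolifyProxy amounts to (imports as in the file header; not a verbatim copy):

// Hedged reconstruction: read the networks coolify-haproxy is attached to once,
// then connect it to every destination network that is still missing.
import { asyncExecShell, getEngine } from '$lib/common';
import * as db from '$lib/database';

export async function configureNetworkCoolifyProxy(engine: string): Promise<void> {
	const host = getEngine(engine);
	const destinations = await db.prisma.destinationDocker.findMany({ where: { engine } });
	const { stdout: networks } = await asyncExecShell(
		`DOCKER_HOST="${host}" docker ps -a --filter name=coolify-haproxy --format '{{json .Networks}}'`
	);
	const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
	for (const destination of destinations) {
		if (!configuredNetworks.includes(destination.network)) {
			await asyncExecShell(
				`DOCKER_HOST="${host}" docker network connect ${destination.network} coolify-haproxy`
			);
		}
	}
}
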


@@ -2,11 +2,9 @@ import { asyncExecShell, saveBuildLog } from '$lib/common';
import got from 'got';
import jsonwebtoken from 'jsonwebtoken';
import * as db from '$lib/database';
import { ErrorHandler } from '$lib/database';
export default async function ({
applicationId,
debug,
workdir,
githubAppId,
repository,
@@ -14,7 +12,16 @@ export default async function ({
htmlUrl,
branch,
buildId
}): Promise<any> {
}: {
applicationId: string;
workdir: string;
githubAppId: string;
repository: string;
apiUrl: string;
htmlUrl: string;
branch: string;
buildId: string;
}): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '');
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
const { privateKey, appId, installationId } = await db.getUniqueGithubApp({ githubAppId });


@@ -9,7 +9,16 @@ export default async function ({
branch,
buildId,
privateSshKey
}): Promise<any> {
}: {
applicationId: string;
workdir: string;
repository: string;
htmlUrl: string;
branch: string;
buildId: string;
repodir: string;
privateSshKey: string;
}): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);

src/lib/lang.json (new file, +4)

@@ -0,0 +1,4 @@
{
"fr": "Français",
"en": "English"
}


@@ -6,9 +6,14 @@ import cuid from 'cuid';
import fs from 'fs/promises';
import getPort, { portNumbers } from 'get-port';
import { supportedServiceTypesAndVersions } from '$lib/components/common';
import { promises as dns } from 'dns';
import { listServicesWithIncludes } from '$lib/database';
export async function letsEncrypt(domain, id = null, isCoolify = false) {
export async function letsEncrypt(domain: string, id?: string, isCoolify = false): Promise<void> {
try {
const certbotImage =
process.arch === 'x64' ? 'certbot/certbot' : 'certbot/certbot:arm64v8-latest';
const data = await db.prisma.setting.findFirst();
const { minPort, maxPort } = data;
@@ -62,7 +67,7 @@ export async function letsEncrypt(domain, id = null, isCoolify = false) {
if (found) return;
await asyncExecShell(
`DOCKER_HOST=${host} docker run --rm --name certbot-${randomCuid} -p 9080:${randomPort} -v "coolify-letsencrypt:/etc/letsencrypt" certbot/certbot --logs-dir /etc/letsencrypt/logs certonly --standalone --preferred-challenges http --http-01-address 0.0.0.0 --http-01-port ${randomPort} -d ${nakedDomain} -d ${wwwDomain} --expand --agree-tos --non-interactive --register-unsafely-without-email ${
`DOCKER_HOST=${host} docker run --rm --name certbot-${randomCuid} -p 9080:${randomPort} -v "coolify-letsencrypt:/etc/letsencrypt" ${certbotImage} --logs-dir /etc/letsencrypt/logs certonly --standalone --preferred-challenges http --http-01-address 0.0.0.0 --http-01-port ${randomPort} -d ${nakedDomain} -d ${wwwDomain} --expand --agree-tos --non-interactive --register-unsafely-without-email ${
dev ? '--test-cert' : ''
}`
);
@@ -82,7 +87,7 @@ export async function letsEncrypt(domain, id = null, isCoolify = false) {
}
if (found) return;
await asyncExecShell(
`DOCKER_HOST=${host} docker run --rm --name certbot-${randomCuid} -p 9080:${randomPort} -v "coolify-letsencrypt:/etc/letsencrypt" certbot/certbot --logs-dir /etc/letsencrypt/logs certonly --standalone --preferred-challenges http --http-01-address 0.0.0.0 --http-01-port ${randomPort} -d ${domain} --expand --agree-tos --non-interactive --register-unsafely-without-email ${
`DOCKER_HOST=${host} docker run --rm --name certbot-${randomCuid} -p 9080:${randomPort} -v "coolify-letsencrypt:/etc/letsencrypt" ${certbotImage} --logs-dir /etc/letsencrypt/logs certonly --standalone --preferred-challenges http --http-01-address 0.0.0.0 --http-01-port ${randomPort} -d ${domain} --expand --agree-tos --non-interactive --register-unsafely-without-email ${
dev ? '--test-cert' : ''
}`
);
@@ -98,12 +103,13 @@ export async function letsEncrypt(domain, id = null, isCoolify = false) {
}
}
export async function generateSSLCerts() {
export async function generateSSLCerts(): Promise<void> {
const ssls = [];
const applications = await db.prisma.application.findMany({
include: { destinationDocker: true, settings: true },
orderBy: { createdAt: 'desc' }
});
const { fqdn, isDNSCheckEnabled } = await db.prisma.setting.findFirst();
for (const application of applications) {
try {
if (application.fqdn && application.destinationDockerId) {
@@ -131,7 +137,7 @@ export async function generateSSLCerts() {
.map((c) => c.replace(/"/g, ''));
if (containers.length > 0) {
for (const container of containers) {
let previewDomain = `${container.split('-')[1]}.${domain}`;
const previewDomain = `${container.split('-')[1]}.${domain}`;
if (isHttps) ssls.push({ domain: previewDomain, id, isCoolify: false });
}
}
@@ -141,18 +147,7 @@ export async function generateSSLCerts() {
console.log(`Error during generateSSLCerts with ${application.fqdn}: ${error}`);
}
}
const services = await db.prisma.service.findMany({
include: {
destinationDocker: true,
minio: true,
plausibleAnalytics: true,
vscodeserver: true,
wordpress: true,
ghost: true
},
orderBy: { createdAt: 'desc' }
});
const services = await listServicesWithIncludes();
for (const service of services) {
try {
if (service.fqdn && service.destinationDockerId) {
@@ -176,7 +171,6 @@ export async function generateSSLCerts() {
console.log(`Error during generateSSLCerts with ${service.fqdn}: ${error}`);
}
}
const { fqdn } = await db.prisma.setting.findFirst();
if (fqdn) {
const domain = getDomain(fqdn);
const isHttps = fqdn.startsWith('https://');
@@ -198,27 +192,128 @@ export async function generateSSLCerts() {
file.endsWith('.pem') && certificates.push(file.replace(/\.pem$/, ''));
}
}
for (const ssl of ssls) {
if (!dev) {
if (
certificates.includes(ssl.domain) ||
certificates.includes(ssl.domain.replace('www.', ''))
) {
console.log(`Certificate for ${ssl.domain} already exists`);
if (isDNSCheckEnabled) {
const resolver = new dns.Resolver({ timeout: 2000 });
resolver.setServers(['8.8.8.8', '1.1.1.1']);
let ipv4, ipv6;
try {
ipv4 = await (await asyncExecShell(`curl -4s https://ifconfig.io`)).stdout;
} catch (error) {}
try {
ipv6 = await (await asyncExecShell(`curl -6s https://ifconfig.io`)).stdout;
} catch (error) {}
for (const ssl of ssls) {
if (!dev) {
if (
certificates.includes(ssl.domain) ||
certificates.includes(ssl.domain.replace('www.', ''))
) {
// console.log(`Certificate for ${ssl.domain} already exists`);
} else {
// Checking DNS entry before generating certificate
if (ipv4 || ipv6) {
let domains4 = [];
let domains6 = [];
try {
domains4 = await resolver.resolve4(ssl.domain);
} catch (error) {}
try {
domains6 = await resolver.resolve6(ssl.domain);
} catch (error) {}
if (domains4.length > 0 || domains6.length > 0) {
if (
(ipv4 && domains4.includes(ipv4.replace('\n', ''))) ||
(ipv6 && domains6.includes(ipv6.replace('\n', '')))
) {
console.log('Generating SSL for', ssl.domain);
return await letsEncrypt(ssl.domain, ssl.id, ssl.isCoolify);
}
}
}
console.log('DNS settings is incorrect for', ssl.domain, 'skipping.');
}
} else {
console.log('Generating SSL for', ssl.domain);
await letsEncrypt(ssl.domain, ssl.id, ssl.isCoolify);
if (
certificates.includes(ssl.domain) ||
certificates.includes(ssl.domain.replace('www.', ''))
) {
console.log(`Certificate for ${ssl.domain} already exists`);
} else {
// Checking DNS entry before generating certificate
if (ipv4 || ipv6) {
let domains4 = [];
let domains6 = [];
try {
domains4 = await resolver.resolve4(ssl.domain);
} catch (error) {}
try {
domains6 = await resolver.resolve6(ssl.domain);
} catch (error) {}
if (domains4.length > 0 || domains6.length > 0) {
if (
(ipv4 && domains4.includes(ipv4.replace('\n', ''))) ||
(ipv6 && domains6.includes(ipv6.replace('\n', '')))
) {
console.log('Generating SSL for', ssl.domain);
return;
}
}
}
console.log('DNS settings is incorrect for', ssl.domain, 'skipping.');
}
}
}
} else {
if (!dev) {
for (const ssl of ssls) {
if (
certificates.includes(ssl.domain) ||
certificates.includes(ssl.domain.replace('www.', ''))
) {
} else {
console.log('Generating SSL for', ssl.domain);
return await letsEncrypt(ssl.domain, ssl.id, ssl.isCoolify);
}
}
} else {
if (
certificates.includes(ssl.domain) ||
certificates.includes(ssl.domain.replace('www.', ''))
) {
console.log(`Certificate for ${ssl.domain} already exists`);
} else {
console.log('Generating SSL for', ssl.domain);
for (const ssl of ssls) {
if (
certificates.includes(ssl.domain) ||
certificates.includes(ssl.domain.replace('www.', ''))
) {
console.log(`Certificate for ${ssl.domain} already exists`);
} else {
console.log('Generating SSL for', ssl.domain);
}
}
}
}
}
}
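
The DNS pre-check added above boils down to: look up the server's own public IPv4/IPv6 via ifconfig.io, resolve the candidate domain through 8.8.8.8/1.1.1.1, and only generate a certificate when one of the resolved records matches. A condensed sketch (helper name invented):

// Sketch only: the DNS check distilled into a standalone helper.
import { promises as dns } from 'dns';
import { asyncExecShell } from '$lib/common';

export async function domainPointsToThisServer(domain: string): Promise<boolean> {
	const resolver = new dns.Resolver({ timeout: 2000 });
	resolver.setServers(['8.8.8.8', '1.1.1.1']);
	let ipv4: string | undefined;
	let ipv6: string | undefined;
	try {
		ipv4 = (await asyncExecShell(`curl -4s https://ifconfig.io`)).stdout.trim();
	} catch (error) {}
	try {
		ipv6 = (await asyncExecShell(`curl -6s https://ifconfig.io`)).stdout.trim();
	} catch (error) {}
	let records4: string[] = [];
	let records6: string[] = [];
	try {
		records4 = await resolver.resolve4(domain);
	} catch (error) {}
	try {
		records6 = await resolver.resolve6(domain);
	} catch (error) {}
	return Boolean((ipv4 && records4.includes(ipv4)) || (ipv6 && records6.includes(ipv6)));
}
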
export async function renewSSLCerts(): Promise<void> {
if (!dev) {
const host = 'unix:///var/run/docker.sock';
await asyncExecShell(`docker pull alpine:latest`);
const certbotImage =
process.arch === 'x64' ? 'certbot/certbot' : 'certbot/certbot:arm64v8-latest';
const { stdout: certificates } = await asyncExecShell(
`DOCKER_HOST=${host} docker run --rm -v "coolify-letsencrypt:/etc/letsencrypt" -v "coolify-ssl-certs:/app/ssl" alpine:latest sh -c "ls -1 /etc/letsencrypt/live/ | grep -v README"`
);
for (const certificate of certificates.trim().split('\n')) {
try {
await asyncExecShell(
`DOCKER_HOST=${host} docker run --rm --name certbot-renewal -p 9080:9080 -v "coolify-letsencrypt:/etc/letsencrypt" ${certbotImage} --cert-name ${certificate} --logs-dir /etc/letsencrypt/logs renew --standalone --preferred-challenges http --http-01-address 0.0.0.0 --http-01-port 9080`
);
await asyncExecShell(
`DOCKER_HOST=${host} docker run --rm -v "coolify-letsencrypt:/etc/letsencrypt" -v "coolify-ssl-certs:/app/ssl" alpine:latest sh -c "test -d /etc/letsencrypt/live/${certificate}/ && cat /etc/letsencrypt/live/${certificate}/fullchain.pem /etc/letsencrypt/live/${certificate}/privkey.pem > /app/ssl/${certificate}.pem"`
);
} catch (error) {
console.log(error);
}
}
await reloadHaproxy('unix:///var/run/docker.sock');
}
}

src/lib/locales/en.json (new file, +341)

@@ -0,0 +1,341 @@
{
"layout": {
"update_done": "Update completed.",
"wait_new_version_startup": "Waiting for the new version to start...",
"new_version": "New version reachable. Reloading...",
"switch_to_a_different_team": "Switch to a different team...",
"update_available": "Update available"
},
"error": {
"you_can_find_your_way_back": "You can find your way back",
"here": "here",
"you_are_lost": "Ooops you are lost! But don't be afraid!"
},
"index": {
"dashboard": "Dashboard",
"applications": "Applications",
"destinations": "Destinations",
"git_sources": "Git Sources",
"databases": "Databases",
"services": "Services",
"teams": "Teams",
"not_implemented_yet": "Not implemented yet",
"database": "Database",
"settings": "Settings",
"global_settings": "Global Settings",
"secret": "Secret",
"team": "Team",
"logout": "Logout"
},
"login": {
"already_logged_in": "Already logged in...",
"authenticating": "Authenticating...",
"login": "Login"
},
"forms": {
"password": "Password",
"email": "Email address",
"passwords_not_match": "Passwords do not match.",
"password_again": "Password again",
"save": "Save",
"saving": "Saving...",
"name": "Name",
"value": "Value",
"action": "Action",
"is_required": "is required.",
"add": "Add",
"set": "Set",
"remove": "Remove",
"path": "Path",
"confirm_continue": "Are you sure to continue?",
"must_be_stopped_to_modify": "Must be stopped to modify.",
"port": "Port",
"default": "default",
"base_directory": "Base Directory",
"publish_directory": "Publish Directory",
"generated_automatically_after_start": "Generated automatically after start",
"roots_password": "Root's Password",
"root_user": "Root User",
"eg": "eg",
"user": "User",
"loading": "Loading...",
"version": "Version",
"host": "Host",
"already_used_for": "<span class=\"text-red-500\">{{type}}</span> already used for",
"configuration": "Configuration",
"engine": "Engine",
"network": "Network",
"ip_address": "IP Address",
"ssh_private_key": "SSH Private Key",
"type": "Type",
"html_url": "HTML URL",
"api_url": "API URL",
"organization": "Organization",
"new_password": "New password",
"super_secure_new_password": "Super secure new password",
"submit": "Submit",
"default_email_address": "Default Email Address",
"default_password": "Default Password",
"username": "Username",
"root_db_user": "Root DB User",
"root_db_password": "Root DB Password",
"api_port": "API Port",
"verifying": "Verifying",
"verify_emails_without_smtp": "Verify emails without SMTP",
"extra_config": "Extra Config",
"select_a_service": "Select a Service",
"select_a_service_version": "Select a Service version",
"removing": "Removing...",
"remove_domain": "Remove domain",
"public_port_range": "Public Port Range",
"public_port_range_explainer": "Ports used to expose databases/services/internal services.<br> Add them to your firewall (if applicable).<br><br>You can specify a range of ports, eg: <span class='text-yellow-500 font-bold'>9000-9100</span>",
"no_actions_available": "No actions available",
"admin_api_key": "Admin API key"
},
"register": {
"register": "Register",
"registering": "Registering...",
"first_user": "You are registering the first user. It will be the administrator of your Coolify instance."
},
"reset": {
"reset_password": "Reset",
"invalid_secret_key": "Invalid secret key.",
"secret_key": "Secret Key",
"find_path_secret_key": "You can find it in ~/coolify/.env (COOLIFY_SECRET_KEY)"
},
"application": {
"configuration": {
"buildpack": {
"choose_this_one": "Choose this one..."
},
"branch_already_in_use": "This branch is already used by another application. Webhooks won't work in this case for both applications. Are you sure you want to use it?",
"no_repositories_configured": "No repositories configured for your Git Application.",
"configure_it_now": "Configure it now",
"loading_repositories": "Loading repositories ...",
"select_a_repository": "Please select a repository",
"loading_branches": "Loading branches ...",
"select_a_repository_first": "Please select a repository first",
"select_a_branch": "Please select a branch",
"loading_groups": "Loading groups...",
"select_a_group": "Please select a group",
"loading_projects": "Loading projects...",
"select_a_project": "Please select a project",
"no_projects_found": "No projects found",
"no_branches_found": "No branches found",
"configure_build_pack": "Configure Build Pack",
"scanning_repository_suggest_build_pack": "Scanning repository to suggest a build pack for you...",
"found_lock_file": "Found lock file for <span class=\"font-bold text-orange-500 px-1\"> {{packageManager}}</span>.Using it for predefined commands commands.",
"configure_destination": "Configure Destination",
"no_configurable_destination": "No configurable Destination found",
"select_a_repository_project": "Select a Repository / Project",
"select_a_git_source": "Select a Git Source",
"no_configurable_git": "No configurable Git Source found",
"configuration_missing": "Configuration missing"
},
"build": {
"queued_waiting_exec": "Queued and waiting for execution.",
"build_logs_of": "Build logs of",
"running": "Running",
"queued": "Queued",
"finished_in": "Finished in",
"load_more": "Load More",
"no_logs": "No logs found",
"waiting_logs": "Waiting for the logs..."
},
"preview": {
"need_during_buildtime": "Need during buildtime?",
"setup_secret_app_first": "You can add secrets to PR/MR deployments. Please add secrets to the application first. <br>Useful for creating <span class='text-green-500 font-bold'>staging</span> environments.",
"values_overwriting_app_secrets": "These values overwrite application secrets in PR/MR deployments. Useful for creating <span class='text-green-500 font-bold'>staging</span> environments.",
"redeploy": "Redeploy",
"no_previews_available": "No previews available"
},
"secrets": {
"secret_saved": "Secret saved.",
"use_isbuildsecret": "Use isBuildSecret",
"secrets_for": "Secrets for"
},
"storage": {
"path_is_required": "Path is required.",
"storage_saved": "Storage saved.",
"storage_updated": "Storage updated.",
"storage_deleted": "Storage deleted.",
"persistent_storage_explainer": "You can specify any folder that you want to be persistent across deployments. <br>This is useful for storing data such as a database (SQLite) or a cache."
},
"deployment_queued": "Deployment queued.",
"confirm_to_delete": "Are you sure you would like to delete '{{name}}'?",
"stop_application": "Stop application",
"permission_denied_stop_application": "You do not have permission to stop the application.",
"rebuild_application": "Rebuild application",
"permission_denied_rebuild_application": "You do not have permission to rebuild application.",
"build_and_start_application": "Build and start application",
"permission_denied_build_and_start_application": "You do not have permission to Build and start application.",
"configurations": "Configurations",
"secret": "Secrets",
"persistent_storage": "Persistent Storage",
"previews": "Previews",
"logs": "Application Logs",
"build_logs": "Build Logs",
"delete_application": "Delete application",
"permission_denied_delete_application": "You do not have permission to delete this application",
"domain_already_in_use": "Domain {{domain}} is already used.",
"dns_not_set_error": "DNS not set correctly or propogated for {{domain}}.<br><br>Please check your DNS settings.",
"domain_required": "Domain is required.",
"settings_saved": "Settings saved.",
"dns_not_set_partial_error": "DNS not set",
"domain_not_valid": "Could not resolve domain or it's not pointing to the server IP address.<br><br>Please check your DNS configuration and try again.",
"git_source": "Git Source",
"git_repository": "Git Repository",
"build_pack": "Build Pack",
"base_image": "Deployment Image",
"base_image_explainer": "Image that will be used for the deployment.",
"base_build_image": "Build Image",
"base_build_image_explainer": "Image that will be used during the build process.",
"destination": "Destination",
"application": "Application",
"url_fqdn": "URL (FQDN)",
"domain_fqdn": "Domain (FQDN)",
"https_explainer": "If you specify <span class='text-green-500 font-bold'>https</span>, the application will be accessible only over https. SSL certificate will be generated for you.<br>If you specify <span class='text-green-500 font-bold'>www</span>, the application will be redirected (302) from non-www and vice versa.<br><br>To modify the domain, you must first stop the application.<br><br><span class='text-white font-bold'>You must set your DNS to point to the server IP in advance.</span>",
"ssl_www_and_non_www": "Generate SSL for www and non-www?",
"ssl_explainer": "It will generate certificates for both www and non-www. <br>You need to have <span class='font-bold text-green-500'>both DNS entries</span> set in advance.<br><br>Useful if you expect to have visitors on both.",
"install_command": "Install Command",
"build_command": "Build Command",
"start_command": "Start Command",
"directory_to_use_explainer": "Directory to use as the base for all commands.<br>Could be useful with <span class='text-green-500 font-bold'>monorepos</span>.",
"publish_directory_explainer": "Directory containing all the assets for deployment. <br> For example: <span class='text-green-500 font-bold'>dist</span>,<span class='text-green-500 font-bold'>_site</span> or <span class='text-green-500 font-bold'>public</span>.",
"features": "Features",
"enable_automatic_deployment": "Enable Automatic Deployment",
"enable_auto_deploy_webhooks": "Enable automatic deployment through webhooks.",
"enable_mr_pr_previews": "Enable MR/PR Previews",
"expose_a_port": "Expose a port",
"enable_preview_deploy_mr_pr_requests": "Enable preview deployments from pull or merge requests.",
"debug_logs": "Debug Logs",
"enable_debug_log_during_build": "Enable debug logs during build phase.<br><span class='text-red-500 font-bold'>Sensitive information</span> could be visible and saved in logs.",
"cant_activate_auto_deploy_without_repo": "Cannot activate automatic deployments until only one application is defined for this repository / branch.",
"no_applications_found": "No applications found",
"secret__batch_dot_env": "Paste .env file",
"batch_secrets": "Batch add secrets"
},
"general": "General",
"database": {
"default_database": "Default Database",
"generated_automatically_after_set_to_public": "Generated automatically after set to public",
"connection_string": "Connection String",
"set_public": "Set it public",
"warning_database_public": "Your database will be reachable over the internet. <br>Take security seriously in this case!",
"change_append_only_mode": "Change append only mode",
"warning_append_only": "Useful if you would like to restore redis data from a backup.<br><span class='font-bold text-white'>Database restart is required.</span>",
"select_database_type": "Select a Database type",
"select_database_version": "Select a Database version",
"confirm_stop": "Are you sure you would like to stop {{name}}?",
"stop_database": "Stop database",
"permission_denied_stop_database": "You do not have permission to stop the database.",
"start_database": "Start database",
"permission_denied_start_database": "You do not have permission to start the database.",
"delete_database": "Delete Database",
"permission_denied_delete_database": "You do not have permission to delete a Database",
"no_databases_found": "No databases found",
"logs": "Database Logs"
},
"destination": {
"delete_destination": "Delete Destination",
"permission_denied_delete_destination": "You do not have permission to delete this destination",
"add_to_coolify": "Add to Coolify",
"coolify_proxy_stopped": "Coolify Proxy stopped!",
"coolify_proxy_started": "Coolify Proxy started!",
"confirm_restart_proxy": "Are you sure you want to restart the proxy? Everything will be reconfigured in ~10 secs.",
"coolify_proxy_restarting": "Coolify Proxy restarting...",
"restarting_please_wait": "Restarting... please wait...",
"force_restart_proxy": "Force restart proxy",
"use_coolify_proxy": "Use Coolify Proxy?",
"no_destination_found": "No destination found",
"new_error_network_already_exists": "Network {{network}} already configured for another team!",
"new": {
"saving_and_configuring_proxy": "Saving and configuring proxy...",
"install_proxy": "This will install a proxy on the destination to allow you to access your applications and services without any manual configuration (recommended for Docker).<br><br>Databases will have their own proxy.",
"add_new_destination": "Add New Destination",
"predefined_destinations": "Predefined destinations"
}
},
"sources": {
"local_docker": "Local Docker",
"remote_docker": "Remote Docker",
"organization_explainer": "Fill it if you would like to use an organization's as your Git Source. Otherwise your user will be used."
},
"source": {
"new": {
"git_source": "Add New Git Source",
"official_providers": "Official providers"
},
"no_git_sources_found": "No git sources found",
"delete_git_source": "Delete Git Source",
"permission_denied": "You do not have permission to delete a Git Source",
"create_new_app": "Create new {{name}} App",
"change_app_settings": "Change {{name}} App Settings",
"install_repositories": "Install Repositories",
"application_id": "Application ID",
"group_name": "Group Name",
"oauth_id": "OAuth ID",
"oauth_id_explainer": "The OAuth ID is the unique identifier of the GitLab application. <br>You can find it <span class='font-bold text-orange-600' >in the URL</span> of your GitLab OAuth Application.",
"register_oauth_gitlab": "Register new OAuth application on GitLab",
"gitlab": {
"self_hosted": "Instance-wide application (self-hosted)",
"user_owned": "User owned application",
"group_owned": "Group owned application",
"gitlab_application_type": "GitLab Application Type",
"already_configured": "GitLab App is already configured."
},
"github": {
"redirecting": "Redirecting to Github..."
}
},
"services": {
"all_email_verified": "All email verified. You can login now.",
"generate_www_non_www_ssl": "It will generate certificates for both www and non-www. <br>You need to have <span class='font-bold text-pink-600'>both DNS entries</span> set in advance.<br><br>Service needs to be restarted."
},
"service": {
"stop_service": "Stop Service",
"permission_denied_stop_service": "You do not have permission to stop the service.",
"start_service": "Start Service",
"permission_denied_start_service": "You do not have permission to start the service.",
"delete_service": "Delete Service",
"permission_denied_delete_service": "You do not have permission to delete a service.",
"no_service": "No services found",
"logs": "Service Logs"
},
"setting": {
"change_language": "Change Language",
"permission_denied": "You do not have permission to do this. \\nAsk an admin to modify your permissions.",
"domain_removed": "Domain removed",
"ssl_explainer": "If you specify <span class='text-yellow-500 font-bold'>https</span>, Coolify will be accessible only over https. SSL certificate will be generated for you.<br>If you specify <span class='text-yellow-500 font-bold'>www</span>, Coolify will be redirected (302) from non-www and vice versa.<br><br><span class='text-yellow-500 font-bold'>WARNING:</span> If you change an already set domain, it will brake webhooks and other integrations! You need to manually update them.",
"must_remove_domain_before_changing": "Must remove the domain before you can change this setting.",
"registration_allowed": "Registration allowed?",
"registration_allowed_explainer": "Allow further registrations to the application. <br>It's turned off after the first registration.",
"coolify_proxy_settings": "Coolify Proxy Settings",
"credential_stat_explainer": "Credentials for <a class=\"text-white font-bold\" href=\"{{link}}\" target=\"_blank\">stats</a> page.",
"auto_update_enabled": "Auto update enabled?",
"auto_update_enabled_explainer": "Enable automatic updates for Coolify. It will be done automatically behind the scenes, if there is no build process running.",
"generate_www_non_www_ssl": "It will generate certificates for both www and non-www. <br>You need to have <span class='font-bold text-yellow-500'>both DNS entries</span> set in advance.",
"is_dns_check_enabled": "DNS check enabled?",
"is_dns_check_enabled_explainer": "You can disable DNS check before creating SSL certificates.<br><br>Turning it off is useful when Coolify is behind a reverse proxy or tunnel."
},
"team": {
"pending_invitations": "Pending invitations",
"accept": "Accept",
"delete": "Delete",
"member": "member(s)",
"root": "(root)",
"invited_with_permissions": "Invited to <span class=\"font-bold text-pink-600\">{{teamName}}</span> with <span class=\"font-bold text-rose-600\">{{permission}}</span> permission.",
"members": "Members",
"root_team_explainer": "This is the <span class='text-red-500 font-bold'>root</span> team. That means members of this group can manage instance wide settings and have all the priviliges in Coolify (imagine like root user on Linux).",
"permission": "Permission",
"you": "(You)",
"promote_to": "Promote to {{grade}}",
"revoke_invitation": "Revoke invitation",
"pending_invitation": "Pending invitation",
"invite_new_member": "Invite new member",
"send_invitation": "Send invitation",
"invite_only_register_explainer": "You can only invite registered users at the moment - will be extended soon.",
"admin": "Admin",
"read": "Read"
}
}
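The locale strings above carry {{...}} placeholders such as {{network}}, {{name}} and {{teamName}} that get filled in at render time. A minimal TypeScript sketch of that interpolation, using a hypothetical helper rather than Coolify's actual i18n API:

// Hypothetical interpolate() helper; Coolify's real i18n layer may differ.
type Values = Record<string, string | number>;

function interpolate(template: string, values: Values = {}): string {
  // Replace every {{name}} token with the matching value; unknown tokens are left as-is.
  return template.replace(/\{\{(\w+)\}\}/g, (token, key) =>
    key in values ? String(values[key]) : token
  );
}

// Example with one of the strings above:
interpolate('Network {{network}} is already configured for another team!', { network: 'coolify' });
// -> 'Network coolify is already configured for another team!'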

323
src/lib/locales/fr.json Normal file
View File

@@ -0,0 +1,323 @@
{
"application": {
"application": "Application",
"build": {
"build_logs_of": "Créer des journaux de",
"finished_in": "Fini en",
"load_more": "Charger plus",
"no_logs": "Aucun journal trouvé",
"queued": "En file d'attente",
"queued_waiting_exec": "En file d'attente et en attente d'exécution.",
"running": "Fonctionnement",
"waiting_logs": "En attente des logs..."
},
"build_and_start_application": "Build et démarrer l'application",
"build_command": "Commande Build",
"build_logs": "Créer des journaux",
"build_pack": "Pack de Build",
"cant_activate_auto_deploy_without_repo": "Impossible d'activer les déploiements automatiques tant qu'une seule application n'est pas définie pour ce dépôt/branche.",
"configuration": {
"branch_already_in_use": "Cette branche est déjà utilisée par une autre application. \nLes webhooks ne fonctionneront pas dans ce cas pour les deux applications. \nÊtes-vous sûr de vouloir l'utiliser ?",
"buildpack": {
"choose_this_one": "Choisir celui-ci..."
},
"configuration_missing": "Configuration manquante",
"configure_build_pack": "Configurer le pack de build",
"configure_destination": "Configurer la destination",
"configure_it_now": "Configurez-le maintenant",
"found_lock_file": "Fichier .lock trouvé pour <span class=\"font-bold text-orange-500 pl-1\">{{packageManager}}</span>. \nL'utiliser pour les commandes prédéfinies.",
"loading_branches": "Chargement des branches...",
"loading_groups": "Chargement des groupes...",
"loading_projects": "Chargement des projets...",
"loading_repositories": "Chargement des dépôts Git...",
"no_branches_found": "Aucune branche trouvée",
"no_configurable_destination": "Aucune destination configurable trouvée",
"no_configurable_git": "Aucune source Git configurable trouvée",
"no_projects_found": "Aucun projet trouvé",
"no_repositories_configured": "Aucun dépôt Git configuré pour votre application.",
"scanning_repository_suggest_build_pack": "Analyse du dépôt pour vous suggérer un pack de Build...",
"select_a_branch": "Veuillez sélectionner une branche",
"select_a_git_source": "Sélectionnez une source Git",
"select_a_group": "Veuillez sélectionner un groupe",
"select_a_project": "Veuillez sélectionner un projet",
"select_a_repository": "Veuillez sélectionner un dépôt",
"select_a_repository_first": "Veuillez d'abord sélectionner un dépôt",
"select_a_repository_project": "Sélectionnez un dépôt / projet"
},
"configurations": "Configurations",
"confirm_to_delete": "Voulez-vous vraiment supprimer '{{name}}'?",
"debug_logs": "Journaux de débogage",
"delete_application": "Supprimer l'application",
"deployment_queued": "Déploiement en file d'attente.",
"destination": "Destination",
"directory_to_use_explainer": "Répertoire à utiliser comme base pour toutes les commandes.<br>Pourrait être utile avec <span class='text-green-500 font-bold'>monorepos</span>.",
"dns_not_set_error": "DNS non défini ou propagé pour {{domain}}.<br><br>Veuillez vérifier vos paramètres DNS.",
"dns_not_set_partial_error": "DNS non défini",
"domain_already_in_use": "Le domaine {{domain}} est déjà utilisé.",
"domain_fqdn": "Domaine (FQDN)",
"url_fqdn": "URL (FQDN)",
"enable_auto_deploy_webhooks": "Activez le déploiement automatique via des webhooks.",
"enable_automatic_deployment": "Activer le déploiement automatique",
"enable_debug_log_during_build": "Activez les journaux de débogage pendant la phase de build.<br><span class='text-red-500 font-bold'>Les informations sensibles</span> peuvent être visibles et enregistrées dans les journaux.",
"enable_mr_pr_previews": "Activer les aperçus MR/PR",
"enable_preview_deploy_mr_pr_requests": "Activez les déploiements de prévisualisation à partir de demandes d'extraction ou de fusion.",
"expose_a_port": "Exposer un port",
"features": "Caractéristiques",
"git_repository": "Dépôt Git",
"git_source": "Source Git",
"https_explainer": "Si vous spécifiez <span class='text-green-500 font-bold'>https</span>, l'application sera accessible uniquement via https. \nUn certificat SSL sera généré pour vous.<br>Si vous spécifiez <span class='text-green-500 font-bold'>www</span>, l'application sera redirigée (302) à partir de non-www et vice versa \n.<br><br>Pour modifier le domaine, vous devez d'abord arrêter l'application.<br><br><span class='text-white font-bold'>Vous devez configurer, en avance, votre DNS pour pointer vers l'IP du serveur.</span>",
"install_command": "Commande d'installation",
"logs": "Journaux des applications",
"no_applications_found": "Aucune application trouvée",
"permission_denied_build_and_start_application": "Vous n'êtes pas autorisé à créer et à démarrer l'application.",
"permission_denied_delete_application": "Vous n'êtes pas autorisé à supprimer cette application",
"permission_denied_rebuild_application": "Vous n'êtes pas autorisé à re-build l'application.",
"permission_denied_stop_application": "Vous n'êtes pas autorisé à arrêter l'application.",
"persistent_storage": "Stockage persistant",
"preview": {
"need_during_buildtime": "Besoin pendant la build ?",
"no_previews_available": "Aucun aperçu disponible",
"redeploy": "Redéployer",
"setup_secret_app_first": "Vous pouvez ajouter des secrets aux déploiements PR/MR. \nVeuillez d'abord ajouter des secrets à l'application. \n<br>Utile pour créer des environnements <span class='text-green-500 font-bold'>de mise en scène</span>.",
"values_overwriting_app_secrets": "Ces valeurs remplacent les secrets d'application dans les déploiements PR/MR. \nUtile pour créer des environnements <span class='text-green-500 font-bold'>de mise en scène</span>."
},
"previews": "Aperçus",
"publish_directory_explainer": "Répertoire contenant tous les actifs à déployer. \n<br> Par exemple : <span class='text-green-500 font-bold'>dist</span>,<span class='text-green-500 font-bold'>_site</span> ou <span \nclass='text-green-500 font-bold'>public</span>.",
"rebuild_application": "Re-build l'application",
"secret": "secrets",
"secrets": {
"secret_saved": "Secret enregistré.",
"secrets_for": "secrets pour",
"use_isbuildsecret": "Utiliser isBuildSecret"
},
"settings_saved": "Paramètres sauvegardés.",
"ssl_explainer": "Il générera des certificats pour www et non-www. \n<br>Vous devez avoir <span class='font-bold text-green-500'>les deux entrées DNS</span> définies à l'avance.<br><br>Utile si vous prévoyez d'avoir des visiteurs sur les deux.",
"ssl_www_and_non_www": "Générer SSL pour www et non-www ?",
"start_command": "Démarrer la commande",
"stop_application": "Arrêter l'application",
"storage": {
"path_is_required": "Le chemin est requis.",
"persistent_storage_explainer": "Vous pouvez spécifier n'importe quel dossier que vous souhaitez conserver dans les déploiements. \n<br>Ceci est utile pour stocker des données telles qu'une base de données (SQLite) ou un cache.",
"storage_deleted": "Stockage supprimé.",
"storage_saved": "Stockage enregistré.",
"storage_updated": "Stockage mis à jour."
}
},
"database": {
"change_append_only_mode": "Changer le mode d'ajout uniquement",
"confirm_stop": "Êtes-vous sûr de vouloir arrêter {{name}} ?",
"connection_string": "Connexion string",
"default_database": "Base de données par défaut",
"delete_database": "Supprimer la base de données",
"generated_automatically_after_set_to_public": "Généré automatiquement après avoir été défini sur public",
"no_databases_found": "Aucune base de données trouvée",
"permission_denied_delete_database": "Vous n'êtes pas autorisé à supprimer une base de données",
"permission_denied_start_database": "Vous n'êtes pas autorisé à démarrer la base de données.",
"permission_denied_stop_database": "Vous n'êtes pas autorisé à arrêter la base de données.",
"select_database_type": "Sélectionnez un type de base de données",
"select_database_version": "Sélectionnez une version de la base de données",
"set_public": "Rendre public",
"start_database": "Démarrer la base de données",
"stop_database": "Arrêter la base de données",
"warning_append_only": "Utile si vous souhaitez restaurer des données Redis à partir d'une sauvegarde.<br><span class='font-bold text-white'>Le redémarrage de la base de données est nécessaire.</span>",
"warning_database_public": "Votre base de données sera accessible depuis Internet. \n<br>Prenez la sécurité au sérieux dans ce cas!"
},
"destination": {
"add_to_coolify": "Ajouter à Coolify",
"confirm_restart_proxy": "Voulez-vous vraiment redémarrer le proxy? \nTout sera reconfiguré en ~10 secondes.",
"coolify_proxy_restarting": "Redémarrage du Proxy Coolify...",
"coolify_proxy_started": "Proxy Coolify démarré!",
"coolify_proxy_stopped": "Proxy Coolify arrêté!",
"delete_destination": "Supprimer le destinataire",
"force_restart_proxy": "Forcer le redémarrage du proxy",
"new": {
"add_new_destination": "Ajouter une nouvelle destination",
"install_proxy": "Cela installera un proxy sur la destination pour vous permettre d'accéder à vos applications et services sans aucune configuration manuelle (recommandé pour Docker).<br><br>Les bases de données auront leur propre proxy.",
"predefined_destinations": "Destinations prédéfinies",
"saving_and_configuring_proxy": "Enregistrement et configuration du proxy..."
},
"new_error_network_already_exists": "Réseau {{network}} déjà configuré pour une autre équipe !",
"no_destination_found": "Aucune destination trouvée",
"permission_denied_delete_destination": "Vous n'êtes pas autorisé à supprimer cette destination",
"restarting_please_wait": "Redémarrage... veuillez patienter...",
"use_coolify_proxy": "Utiliser le Proxy Coolify ?"
},
"error": {
"here": "ici",
"you_are_lost": "Oups vous êtes perdu ! \nMais n'ayez pas peur !",
"you_can_find_your_way_back": "Tu peux retrouver ton chemin"
},
"forms": {
"action": "action",
"add": "Ajouter",
"already_used_for": "<span class=\"text-red-500\">{{type}}</span> déjà utilisé pour",
"api_port": "Port API",
"api_url": "URL de l'API",
"base_directory": "Répertoire de base",
"configuration": "Configuration",
"confirm_continue": "Êtes-vous sûr de continuer ?",
"default": "défaut",
"default_email_address": "Adresse e-mail par défaut",
"default_password": "Mot de passe par défaut",
"eg": "ex",
"email": "Adresse e-mail",
"engine": "Moteur",
"extra_config": "Configuration supplémentaire",
"generated_automatically_after_start": "Généré automatiquement après le démarrage",
"host": "Hôte",
"html_url": "URL HTML",
"ip_address": "Adresse IP",
"is_required": "est requis.",
"loading": "Chargement...",
"must_be_stopped_to_modify": "Doit être arrêté pour être modifié.",
"name": "Nom",
"network": "Réseau",
"new_password": "Nouveau mot de passe",
"no_actions_available": "Aucune action disponible",
"organization": "Organisation",
"password": "Mot de passe",
"password_again": "Mot de passe à nouveau",
"passwords_not_match": "Les mots de passe ne correspondent pas.",
"path": "Chemin",
"port": "Port",
"public_port_range": "Gamme de ports publics",
"public_port_range_explainer": "Ports utilisés pour exposer les bases de données/services/services internes.<br> Ajoutez-les à votre pare-feu (le cas échéant).<br><br>Vous pouvez spécifier une plage de ports, par exemple : <span class='text-yellow-500 \nfont-bold'>9000-9100</span>",
"publish_directory": "Publier le répertoire",
"remove": "Retirer",
"remove_domain": "Supprimer le domaine",
"removing": "Suppression...",
"root_db_password": "Mot de passe root de la base de données",
"root_db_user": "Utilisateur root de la base de données",
"root_user": "Utilisateur root",
"roots_password": "Mot de passe de l'utilisateur root",
"save": "sauvegarder",
"saving": "Sauvegarde...",
"select_a_service": "Sélectionnez un service",
"select_a_service_version": "Sélectionnez une version de service",
"set": "Régler",
"ssh_private_key": "Clé privée SSH",
"submit": "Nous faire parvenir",
"super_secure_new_password": "Nouveau mot de passe super sécurisé",
"type": "Taper",
"user": "Utilisateur",
"username": "Nom d'utilisateur",
"value": "Valeur",
"verify_emails_without_smtp": "Vérifier les e-mails sans SMTP",
"verifying": "Vérification",
"version": "Version"
},
"general": "Général",
"index": {
"applications": "Applications",
"dashboard": "Tableau de bord",
"database": "Base de données",
"databases": "Bases de données",
"destinations": "Destinations",
"git_sources": "Sources Git",
"global_settings": "Paramètres globaux",
"logout": "Se déconnecter",
"not_implemented_yet": "Pas encore implémenté",
"secret": "Secret",
"services": "Services",
"settings": "Réglages",
"team": "Équipe",
"teams": "Équipes"
},
"layout": {
"new_version": "Nouvelle version accessible. \nRechargement...",
"switch_to_a_different_team": "Changer d'équipe...",
"update_available": "Mise à jour disponible",
"update_done": "Mise à jour terminée.",
"wait_new_version_startup": "En attendant le lancement de la nouvelle version..."
},
"login": {
"already_logged_in": "Déjà connecté...",
"authenticating": "Authentification...",
"login": "Connexion"
},
"register": {
"first_user": "Vous enregistrez le premier utilisateur. \nCe sera l'administrateur de votre instance Coolify.",
"register": "S'inscrire"
},
"reset": {
"find_path_secret_key": "Vous pouvez le trouver dans ~/coolify/.env (COOLIFY_SECRET_KEY)",
"invalid_secret_key": "Clé secrète invalide.",
"reset_password": "Réinitialiser",
"secret_key": "Clef secrète"
},
"service": {
"delete_service": "Supprimer le service",
"no_service": "Aucun service trouvé",
"permission_denied_delete_service": "Vous n'êtes pas autorisé à supprimer un service.",
"permission_denied_start_service": "Vous n'êtes pas autorisé à démarrer le service.",
"permission_denied_stop_service": "Vous n'êtes pas autorisé à arrêter le service.",
"start_service": "Démarrer le service",
"stop_service": "Stopper le service"
},
"services": {
"all_email_verified": "Tous les e-mails sont vérifiés. \nVous pouvez vous connecter maintenant.",
"generate_www_non_www_ssl": "Il générera des certificats pour www et non-www. \n<br>Vous devez avoir <span class='font-bold text-pink-600'>les deux entrées DNS</span> définies à l'avance.<br><br>Le service devra être redémarré."
},
"setting": {
"coolify_proxy_settings": "Paramètres du proxy Coolify",
"credential_stat_explainer": "Identifiants pour la page <a class=\"text-white font-bold\" href=\"{{link}}\" target=\"_blank\">statistiques</a>.",
"domain_removed": "Domaine supprimé",
"must_remove_domain_before_changing": "Vous devez supprimer le domaine avant de pouvoir modifier ce paramètre.",
"permission_denied": "Vous n'avez pas la permission de faire cela. \n\\nDemandez à un administrateur de modifier vos autorisations.",
"registration_allowed": "Inscription autorisée ?",
"registration_allowed_explainer": "Autoriser d'autres inscriptions à l'application. \n<br>Il est désactivé après la première inscription.",
"ssl_explainer": "Si vous spécifiez <span class='text-yellow-500 font-bold'>https</span>, Coolify sera accessible uniquement via https. \nUn certificat SSL sera généré pour vous.<br>Si vous spécifiez <span class='text-yellow-500 font-bold'>www</span>, Coolify sera redirigé (302) à partir de non-www et vice versa."
},
"source": {
"application_id": "ID d'application",
"change_app_settings": "Modifier les paramètres de l'application {{name}}",
"create_new_app": "Créer une nouvelle application {{name}}",
"delete_git_source": "Supprimer la source Git",
"github": {
"redirecting": "Redirection vers Github..."
},
"gitlab": {
"already_configured": "L'application GitLab est déjà configurée.",
"gitlab_application_type": "Type d'application GitLab",
"group_owned": "Application détenue par le groupe",
"self_hosted": "Application à l'échelle de l'instance (auto-hébergée)",
"user_owned": "Application appartenant à l'utilisateur"
},
"group_name": "Nom de groupe",
"install_repositories": "Installer les dépôts",
"new": {
"git_source": "Ajouter une nouvelle source Git",
"official_providers": "Fournisseurs officiels"
},
"no_git_sources_found": "Aucune source git trouvée",
"oauth_id": "ID OAuth",
"oauth_id_explainer": "L'identifiant OAuth est l'identifiant unique de l'application GitLab. \n<br>Vous pouvez le trouver <span class='font-bold text-orange-600' >dans l'URL</span> de votre application GitLab OAuth.",
"permission_denied": "Vous n'êtes pas autorisé à supprimer une source Git",
"register_oauth_gitlab": "Enregistrer une nouvelle application OAuth sur GitLab"
},
"sources": {
"local_docker": "Docker local",
"organization_explainer": "Remplissez-le si vous souhaitez utiliser une organisation comme source Git. \nSinon, votre utilisateur sera utilisé.",
"remote_docker": "Station d'accueil à distance"
},
"team": {
"accept": "J'accepte",
"admin": "Administrateur",
"delete": "Supprimer",
"invite_new_member": "Inviter un nouveau membre",
"invite_only_register_explainer": "Vous ne pouvez inviter que des utilisateurs enregistrés pour le moment - sera bientôt prolongé.",
"invited_with_permissions": "Invité à <span class=\"font-bold text-pink-600\">{{teamName}}</span> avec <span class=\"font-bold text-rose-600\">{{permission}}</span \n> autorisation.",
"member": "membre(s)",
"members": "Membres",
"pending_invitation": "Invitation en attente",
"pending_invitations": "Invitations en attente",
"permission": "Autorisation",
"promote_to": "Promouvoir à {{grade}}",
"read": "Lire",
"revoke_invitation": "Révoquer l'invitation",
"root": "(suprême)",
"root_team_explainer": "Il s'agit de l'équipe <span class='text-red-500 font-bold'>suprême</span>. \nCela signifie que les membres de ce groupe peuvent gérer les paramètres à l'échelle de l'instance et avoir tous les privilèges dans Coolify (imaginez comme un utilisateur root sous Linux).",
"send_invitation": "Envoyer une invitation",
"you": "(Toi)"
}
}
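With en.json and fr.json both living under src/lib/locales, a dictionary can be selected per language code. A minimal sketch using dynamic imports; the loader shape here is an assumption for illustration, not necessarily how Coolify wires its translations:

// Illustrative loader; the $lib alias is SvelteKit's, the rest is hypothetical.
type Dictionary = Record<string, unknown>;

const loaders: Record<string, () => Promise<Dictionary>> = {
  en: () => import('$lib/locales/en.json').then((m) => m.default),
  fr: () => import('$lib/locales/fr.json').then((m) => m.default)
};

export async function loadDictionary(lang: string): Promise<Dictionary> {
  const load = loaders[lang] ?? loaders['en']; // fall back to English
  return await load();
}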

View File

@@ -0,0 +1,42 @@
import { prisma } from '$lib/database';
import { buildQueue } from '.';
import got from 'got';
import { asyncExecShell, version } from '$lib/common';
import compare from 'compare-versions';
import { dev } from '$app/env';
export default async function (): Promise<void> {
  try {
    const currentVersion = version;
    const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
    if (isAutoUpdateEnabled) {
      const versions = await got
        .get(
          `https://get.coollabs.io/versions.json?appId=${process.env['COOLIFY_APP_ID']}&version=${currentVersion}`
        )
        .json();
      const latestVersion = versions['coolify'].main.version;
      const isUpdateAvailable = compare(latestVersion, currentVersion);
      if (isUpdateAvailable === 1) {
        const activeCount = await buildQueue.getActiveCount();
        if (activeCount === 0) {
          if (!dev) {
            await buildQueue.pause();
            console.log(`Updating Coolify to ${latestVersion}.`);
            await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
            await asyncExecShell(`env | grep COOLIFY > .env`);
            await asyncExecShell(
              `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-redis && docker rm coolify coolify-redis && docker compose up -d --force-recreate"`
            );
          } else {
            await buildQueue.pause();
            console.log('Updating (not really in dev mode).');
          }
        }
      }
    }
  } catch (error) {
    await buildQueue.resume();
    console.log(error);
  }
}
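The updater above only proceeds when auto update is enabled, the version endpoint reports something newer, and the build queue is idle. A small sketch of that gate, reusing the compare-versions default import from the file (where compare() returns 1 when its first argument is newer); the response interface is an assumption inferred from how versions['coolify'].main.version is read:

import compare from 'compare-versions';

// Assumed shape of the versions.json payload (only the field read above).
interface VersionsResponse {
  coolify: {
    main: { version: string };
  };
}

function shouldUpdate(currentVersion: string, versions: VersionsResponse, activeBuilds: number): boolean {
  const latestVersion = versions.coolify.main.version;
  // compare() returns 1 when its first argument is the newer version.
  return compare(latestVersion, currentVersion) === 1 && activeBuilds === 0;
}

// shouldUpdate('1.0.0', { coolify: { main: { version: '1.0.1' } } }, 0) -> true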

View File

@@ -20,27 +20,22 @@ import {
setDefaultConfiguration
} from '$lib/buildPacks/common';
import yaml from 'js-yaml';
import type { Job } from 'bullmq';
import type { BuilderJob } from '$lib/types/builderJob';
import type { ComposeFile } from '$lib/types/composeFile';
export default async function (job) {
let {
export default async function (job: Job<BuilderJob, void, string>): Promise<void> {
const {
id: applicationId,
repository,
branch,
buildPack,
name,
destinationDocker,
destinationDockerId,
gitSource,
build_id: buildId,
configHash,
port,
installCommand,
buildCommand,
startCommand,
fqdn,
baseDirectory,
publishDirectory,
projectId,
secrets,
phpModules,
@@ -51,7 +46,23 @@ export default async function (job) {
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage
} = job.data;
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = job.data;
const { debug } = settings;
@@ -67,7 +78,7 @@ export default async function (job) {
});
let imageId = applicationId;
let domain = getDomain(fqdn);
let volumes =
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${
buildPack !== 'docker' ? '/app' : ''
@@ -102,8 +113,10 @@ export default async function (job) {
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
let commit = await importers[gitSource.type]({
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
@@ -140,6 +153,7 @@ export default async function (job) {
JSON.stringify({
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
@@ -176,7 +190,7 @@ export default async function (job) {
//
}
if (!imageFound || deployNeeded) {
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId);
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (buildpacks[buildPack])
await buildpacks[buildPack]({
buildId,
@@ -195,7 +209,7 @@ export default async function (job) {
tag,
workdir,
docker,
port,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
@@ -204,15 +218,18 @@ export default async function (job) {
phpModules,
pythonWSGI,
pythonModule,
pythonVariable
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
deployNeeded = true;
} else {
deployNeeded = false;
await saveBuildLog({ line: 'Nothing changed.', buildId, applicationId });
}
@@ -248,7 +265,7 @@ export default async function (job) {
repository,
branch,
projectId,
port,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
@@ -282,7 +299,19 @@ export default async function (job) {
networks: [docker.network],
labels,
depends_on: [],
restart: 'always'
restart: 'always',
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
deploy: {
restart_policy: {
condition: 'on-failure',
delay: '5s',
max_attempts: 3,
window: '120s'
}
}
}
},
networks: {
@@ -299,7 +328,7 @@ export default async function (job) {
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
sentry.captureException(error);
// sentry.captureException(error);
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
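Two related changes run through the builder diff above: the optional exposePort is threaded into the build pack as a host:container mapping, and the generated compose service only publishes ports when it is set. A minimal sketch of that mapping logic, with field names taken from the diff and everything else trimmed for illustration:

interface PortOptions {
  port: number;        // port the application listens on inside the container
  exposePort?: number; // optional host port to publish directly
}

// Mirrors the `exposePort ? `${exposePort}:${port}` : port` pattern in the diff.
function composePorts({ port, exposePort }: PortOptions): { ports?: string[] } {
  return exposePort ? { ports: [`${exposePort}:${port}`] } : {};
}

// composePorts({ port: 3000 })                   -> {}
// composePorts({ port: 3000, exposePort: 8080 }) -> { ports: ['8080:3000'] }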

View File

@@ -1,71 +1,75 @@
import { dev } from '$app/env';
import { asyncExecShell, getEngine, version } from '$lib/common';
import { prisma } from '$lib/database';
import { defaultProxyImageHttp, defaultProxyImageTcp } from '$lib/haproxy';
export default async function () {
export default async function (): Promise<void> {
const destinationDockers = await prisma.destinationDocker.findMany();
for (const destinationDocker of destinationDockers) {
const host = getEngine(destinationDocker.engine);
// Cleanup old coolify images
const engines = [...new Set(destinationDockers.map(({ engine }) => engine))];
for (const engine of engines) {
let lowDiskSpace = false;
const host = getEngine(engine);
try {
let { stdout: images } = await asyncExecShell(
`DOCKER_HOST=${host} docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs `
const { stdout } = await asyncExecShell(
`DOCKER_HOST=${host} docker exec coolify sh -c 'df -kPT /'`
);
images = images.trim();
if (images) {
await asyncExecShell(`DOCKER_HOST=${host} docker rmi -f ${images}`);
let lines = stdout.trim().split('\n');
let header = lines[0];
let regex =
/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
const boundaries = [];
let match;
while ((match = regex.exec(header))) {
boundaries.push(match[0].length);
}
boundaries[boundaries.length - 1] = -1;
const data = lines.slice(1).map((line) => {
const cl = boundaries.map((boundary) => {
const column = boundary > 0 ? line.slice(0, boundary) : line;
line = line.slice(boundary);
return column.trim();
});
return {
capacity: Number.parseInt(cl[5], 10) / 100
};
});
if (data.length > 0) {
const { capacity } = data[0];
if (capacity > 0.8) {
lowDiskSpace = true;
}
}
} catch (error) {
console.log(error);
}
try {
await asyncExecShell(`DOCKER_HOST=${host} docker container prune -f`);
} catch (error) {
console.log(error);
if (lowDiskSpace) {
// Cleanup old coolify images
try {
let { stdout: images } = await asyncExecShell(
`DOCKER_HOST=${host} docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs `
);
images = images.trim();
if (images) {
await asyncExecShell(`DOCKER_HOST=${host} docker rmi -f ${images}`);
}
} catch (error) {
//console.log(error);
}
try {
await asyncExecShell(`DOCKER_HOST=${host} docker container prune -f`);
} catch (error) {
//console.log(error);
}
try {
await asyncExecShell(`DOCKER_HOST=${host} docker image prune -f --filter "until=2h"`);
} catch (error) {
//console.log(error);
}
// Cleanup unused images older than three days
try {
await asyncExecShell(`DOCKER_HOST=${host} docker image prune --filter "until=72h" -a -f`);
} catch (error) {
//console.log(error);
}
}
try {
await asyncExecShell(`DOCKER_HOST=${host} docker image prune -f --filter "until=2h"`);
} catch (error) {
console.log(error);
}
// Tagging images with labels
// try {
// const images = [
// `coollabsio/${defaultProxyImageTcp}`,
// `coollabsio/${defaultProxyImageHttp}`,
// 'certbot/certbot:latest',
// 'node:16.14.0-alpine',
// 'alpine:latest',
// 'nginx:stable-alpine',
// 'node:lts',
// 'php:apache',
// 'rust:latest'
// ];
// for (const image of images) {
// try {
// await asyncExecShell(`DOCKER_HOST=${host} docker image inspect ${image}`);
// } catch (error) {
// await asyncExecShell(
// `DOCKER_HOST=${host} docker pull ${image} && echo "FROM ${image}" | docker build --label coolify.image="true" -t "${image}" -`
// );
// }
// }
// } catch (error) {}
// if (!dev) {
// // Cleanup images that are not managed by coolify
// try {
// await asyncExecShell(
// `DOCKER_HOST=${host} docker image prune --filter 'label!=coolify.image=true' -a -f`
// );
// } catch (error) {
// console.log(error);
// }
// // Cleanup old images >3 days
// try {
// await asyncExecShell(`DOCKER_HOST=${host} docker image prune --filter "until=72h" -a -f`);
// } catch (error) {
// console.log(error);
// }
// }
}
}
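The reworked cleanup above first asks the coolify container for df -kPT / and derives a usage fraction from the Capacity column, then only prunes images and containers once usage passes 80%. A simpler sketch of the same parsing idea, assuming POSIX df -P output where no column value contains spaces:

// Parse `df -kPT /` output and return used capacity as a fraction (0..1), or null on failure.
function diskUsageFraction(dfOutput: string): number | null {
  const lines = dfOutput.trim().split('\n');
  if (lines.length < 2) return null;
  // Columns: Filesystem, Type, 1024-blocks, Used, Available, Capacity, Mounted on
  const columns = lines[1].trim().split(/\s+/);
  const capacity = Number.parseInt(columns[5], 10); // e.g. "42%" -> 42
  return Number.isNaN(capacity) ? null : capacity / 100;
}

// Cleanup only kicks in above 80% usage:
// const lowDiskSpace = (diskUsageFraction(stdout) ?? 0) > 0.8;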

Some files were not shown because too many files have changed in this diff.