Compare commits


279 Commits

Author SHA1 Message Date
Andras Bacsai
d624f95cb9 Merge pull request #491 from coollabsio/next
v3.1.3
2022-07-18 10:11:19 +02:00
Andras Bacsai
bedbe46830 notify prod release as well 2022-07-18 08:08:48 +00:00
Andras Bacsai
7846bf1bc3 GH action send discord wb 2022-07-18 07:58:50 +00:00
Andras Bacsai
c28bc786ae staging release gh actions 2022-07-18 07:51:36 +00:00
Andras Bacsai
60a53bb812 fix: location url for services and apps 2022-07-18 07:45:29 +00:00
Andras Bacsai
017ba61dc9 fix: gitlab custom url 2022-07-18 07:26:32 +00:00
Andras Bacsai
b5d82dc87b fix: remove shared dir, deployment does not work 2022-07-15 15:40:26 +02:00
Andras Bacsai
866070b9ea feat: init moodle and separate stuffs to shared package 2022-07-15 12:31:54 +00:00
Andras Bacsai
49ecb15773 fix: remove new service start process 2022-07-15 09:18:16 +00:00
Andras Bacsai
047df77195 fix: gitpod urls 2022-07-15 08:00:32 +00:00
Andras Bacsai
0904d1ff9c fix: do not rebuild in case image exists and sha not changed 2022-07-15 08:00:19 +00:00
Andras Bacsai
7bf6041d8c fix: more types 2022-07-14 13:04:00 +00:00
Andras Bacsai
90597389c9 fix: more types for API 2022-07-14 12:47:26 +00:00
Andras Bacsai
039350d762 Merge pull request #490 from coollabsio/next
v3.1.2
2022-07-14 10:52:34 +02:00
Andras Bacsai
ce31146a9c fix: turn off autodeploy if double branch is configured 2022-07-14 08:00:57 +00:00
Andras Bacsai
344dd7db28 fix: message for double branches 2022-07-14 07:42:05 +00:00
Andras Bacsai
b735ca2da7 fix: admin password reset should not timeout 2022-07-14 07:22:26 +00:00
Andras Bacsai
cbdd098528 chore: version++ 2022-07-14 07:22:12 +00:00
Andras Bacsai
d5f9d57be9 Merge pull request #487 from coollabsio/next
v3.1.1
2022-07-13 15:43:46 +02:00
Andras Bacsai
d5f2d22663 fix: Cleanup less often and can do it manually 2022-07-13 13:40:41 +00:00
Andras Bacsai
9914686ed7 chore: version++ 2022-07-12 13:37:07 +00:00
Andras Bacsai
ee9acfe556 chore: version++ 2022-07-12 13:10:58 +00:00
Andras Bacsai
9f9d4297ca feat: Gitpod integration 2022-07-12 13:08:47 +00:00
Andras Bacsai
002637ec5f Merge pull request #486 from coollabsio:gitpod
Update .gitpod.yml
2022-07-12 13:35:10 +02:00
Andras Bacsai
c1238c6594 Update .gitpod.yml 2022-07-12 13:00:58 +02:00
Andras Bacsai
c43b848708 Merge pull request #482 from coollabsio/v3.0.4
v3.1.0
2022-07-12 11:35:43 +02:00
Andras Bacsai
e22950cecb fix: gitpod 2022-07-12 11:34:58 +02:00
Andras Bacsai
5a7edcb762 feat: custom port for git instances 2022-07-12 11:01:48 +02:00
Andras Bacsai
9b47de71fc fix: GitLab loop on misconfigured source 2022-07-12 10:19:34 +02:00
Andras Bacsai
8f9462245a feat: Add Docker buildpack exposed port setting 2022-07-12 09:58:59 +02:00
Andras Bacsai
f0ed51cd22 fix: GitHub App button 2022-07-12 09:58:37 +02:00
Andras Bacsai
99a7eff6ab fix: GitLab search fields 2022-07-12 09:58:27 +02:00
Andras Bacsai
edfed57df3 fix: Wordpress FTP permission issues 2022-07-12 09:25:16 +02:00
Andras Bacsai
a70e35cb79 fix: gitpod 2022-07-12 09:09:30 +02:00
Andras Bacsai
3da1b31363 fix: gitpod 2022-07-12 07:01:02 +00:00
Andras Bacsai
59bc2dd8a7 fix: gitpod updates 2022-07-12 08:56:32 +02:00
Andras Bacsai
7b6e7680a6 Merge pull request #484 from andrasbacsai/v3.0.4
feat: Gitpod ready code(almost)
2022-07-11 22:35:37 +02:00
Andras Bacsai
1c65df282e feat: Gitpod ready code(almost) 2022-07-11 20:32:27 +00:00
Andras Bacsai
77ae070c98 fix: ftp?! 2022-07-08 17:52:51 +02:00
Andras Bacsai
20708f1456 fix: ftp WP issues 2022-07-08 17:47:19 +02:00
Andras Bacsai
9a1a67a4ef chore: version++ 2022-07-08 17:22:35 +02:00
Andras Bacsai
3a8e5df897 feat: Ability to change deployment type for nuxtjs 2022-07-08 17:21:23 +02:00
Andras Bacsai
88a62be30c feat: Ability to change deployment type for nextjs 2022-07-08 16:51:58 +02:00
Andras Bacsai
c478c1b7ad fix: wp missing ftp solution 2022-07-08 15:12:53 +02:00
Andras Bacsai
18c2b2e38e fix: Service domain checker 2022-07-08 14:59:59 +02:00
Andras Bacsai
b105e6fbf8 fix: GitLab pagination load data 2022-07-08 14:36:41 +02:00
Andras Bacsai
da11bae67c fix: do not run cleanup and build parallel
fix: UI error toasts
2022-07-08 14:11:18 +02:00
Andras Bacsai
d344a9bb4f Merge pull request #479 from ArticaDev/fix/deno-buildpack
Changing Deno buildpack to not run main before copying directory
2022-07-08 13:42:40 +02:00
Andras Bacsai
222adb212b fix: permission issues
fix: white labaled version
2022-07-08 13:38:19 +02:00
Marcos Nery
cb01bbe4ac fixing deno buldpack to not run main before copying directory 2022-07-06 23:50:15 -03:00
Andras Bacsai
c63237684a fix: Hostname issue 2022-07-06 20:04:16 +02:00
Andras Bacsai
792d51d93f fix: trustProxy for Fastify 2022-07-06 19:34:16 +02:00
Andras Bacsai
62bfb5dacc fix: Domain check 2022-07-06 19:04:54 +02:00
Andras Bacsai
d7fa80703d fix: Domain check 2022-07-06 19:04:40 +02:00
Andras Bacsai
52b712d90b fix: new destinations 2022-07-06 16:42:31 +02:00
Andras Bacsai
331e13b7cb fix: include post 2022-07-06 16:13:11 +02:00
Andras Bacsai
64bb4a2525 fix: new destination can be created 2022-07-06 15:52:00 +02:00
Andras Bacsai
31d7e7e806 fix: forgot that the version bump changed 😅 2022-07-06 13:46:00 +02:00
Andras Bacsai
e740788d6c chore: version++ 2022-07-06 11:49:53 +02:00
Andras Bacsai
928d53e532 fix: Seeding 2022-07-06 11:49:07 +02:00
Andras Bacsai
87ba4560ad v3.0.0 (#476)
* New Version: 3.0.0
2022-07-06 11:02:36 +02:00
Andras Bacsai
9137e8bc32 Merge pull request #469 from coollabsio/v2.9.11
v2.9.11
2022-06-20 20:48:30 +02:00
Andras Bacsai
35bd2b23d5 fix: lock file 2022-06-20 20:47:33 +02:00
Andras Bacsai
10a514d9ac fix: be able to change database + service versions 2022-06-20 20:47:10 +02:00
Andras Bacsai
71096acdff chore: version++ 2022-06-17 09:11:03 +02:00
Andras Bacsai
07da696397 Merge pull request #463 from execreate/main
Upgrade PSQL version to fix an issue with indices corruption
2022-06-17 09:10:40 +02:00
Jorilla Abdullaev
41baf150c2 upgrade PSQL version to fix an issue with CREATE INDEX CONCURRENTLY and REINDEX CONCURRENTLY that could cause silent data corruption of indexes 2022-06-17 11:08:47 +05:00
Andras Bacsai
f0a52b2ef4 Merge pull request #460 from coollabsio/scshiv29-dev/main
v2.9.9
2022-06-10 10:45:55 +02:00
Andras Bacsai
54e83fdff1 fix: remove package-lock 2022-06-10 10:45:32 +02:00
Andras Bacsai
46327ff2fc chore: version++ 2022-06-10 10:44:15 +02:00
Andras Bacsai
8b26acc841 fix: host and reload for uvicorn 2022-06-10 10:42:58 +02:00
Andras Bacsai
b90cb5a731 Merge pull request #420 from scshiv29-dev/main
Uvicorn ASGI for Python #388
2022-06-10 10:16:26 +02:00
Andras Bacsai
cd9b642c5e Merge branch 'feat/python' into main 2022-06-10 10:15:54 +02:00
Andras Bacsai
41a928d41b Merge branch 'main' into scshiv29-dev/main 2022-06-10 10:14:50 +02:00
Andras Bacsai
1388bee62c Merge pull request #459 from pucilpet/pucilpet-patch-1
Removed space from the version number
2022-06-10 08:51:36 +02:00
Petteri Pucilowski
8ebc778d40 Removed space from the version number 2022-06-10 01:30:46 +03:00
Andras Bacsai
3a59091b41 fix: nocodb persistency 2022-06-09 19:44:41 +02:00
Andras Bacsai
e764c4651c Merge pull request #454 from coollabsio/next
v2.9.8
2022-06-09 15:50:30 +02:00
Andras Bacsai
10f04d2177 fix: persistent nocodb 2022-06-09 15:49:21 +02:00
Andras Bacsai
119f994b50 chore: version++ 2022-06-09 15:49:07 +02:00
Andras Bacsai
e39541c318 fix: Traefik middleware 2022-06-09 15:18:21 +02:00
Andras Bacsai
cf88885c94 Merge pull request #452 from coollabsio/next
Fixes for v2.9.7
2022-06-09 14:01:20 +02:00
Andras Bacsai
1192346ce3 fix: remove comments 2022-06-09 14:00:41 +02:00
Andras Bacsai
0e3bd85847 fix: remove console log 2022-06-09 14:00:08 +02:00
Andras Bacsai
edeb6c6965 fix: Plausible script and middlewares 2022-06-09 13:59:09 +02:00
Andras Bacsai
138fd5cb6d Merge pull request #451 from coollabsio/next
v2.9.7
2022-06-09 13:34:02 +02:00
Andras Bacsai
155410bd44 Merge branch 'next' of github.com:coollabsio/coolify into next 2022-06-09 13:31:10 +02:00
Andras Bacsai
20bd829c2e Merge pull request #448 from titouanmathis/feat/gitlab-filter-projects-branches
feat: Add support for search for GitLab applications
2022-06-09 13:31:06 +02:00
Andras Bacsai
7b7e222946 chore: version++ 2022-06-09 13:30:29 +02:00
Andras Bacsai
98d901d06c fix: Plausible custom script 2022-06-08 13:45:42 +02:00
Titouan Mathis
4e862cda6f Add support for accessing all projects and branches for GitLab applications
Fix #236
2022-06-03 12:21:42 +02:00
Andras Bacsai
4e940807ae Merge pull request #446 from coollabsio/next
v2.9.6
2022-06-02 22:05:31 +02:00
Andras Bacsai
b081743f54 fix: pnpm command 2022-06-02 21:59:51 +02:00
Andras Bacsai
34bb9f301f fix: Fider changed an env variable name 2022-06-02 20:37:45 +02:00
Andras Bacsai
ed8a6daeea chore: version++ 2022-06-02 20:37:31 +02:00
Andras Bacsai
9e81ab43ac Merge pull request #445 from coollabsio/next
v2.9.5
2022-06-02 11:24:50 +02:00
Andras Bacsai
32d94cbe97 fix: proxy stop missing argument 2022-06-02 11:13:27 +02:00
Andras Bacsai
46a83aa457 chore: version++ 2022-06-02 11:13:14 +02:00
Andras Bacsai
08d7593ca9 Merge pull request #444 from coollabsio/next
v2.9.4
2022-06-01 10:43:30 +02:00
Andras Bacsai
a50f7a7cc2 fix: Revert gh and gl cloning 2022-06-01 10:42:17 +02:00
Andras Bacsai
2c33447f9f fix: typo 2022-05-31 23:23:39 +02:00
Andras Bacsai
d67a3f51ec fix: Demo version forms 2022-05-31 23:09:55 +02:00
Andras Bacsai
2719974262 chore: Version++ 2022-05-31 22:41:33 +02:00
Andras Bacsai
eb5aebd58d Merge pull request #442 from coollabsio/next
fix: Only reconfigure coolify proxy if its missconfigured
2022-05-31 22:30:09 +02:00
Andras Bacsai
98dbf3d8a5 fix: Only reconfigure coolify proxy if its missconfigured 2022-05-31 22:29:50 +02:00
Andras Bacsai
d9489a2cb4 Merge pull request #441 from coollabsio/next
fix: versions
2022-05-31 22:18:16 +02:00
Andras Bacsai
95832d34f7 fix: versions 2022-05-31 22:17:51 +02:00
Andras Bacsai
d3e9aea63d Merge pull request #440 from coollabsio/next
v2.9.3
2022-05-31 22:14:06 +02:00
Andras Bacsai
d6972e2ed1 fix: Recurisve clone instead of submodule 2022-05-31 21:52:25 +02:00
Andras Bacsai
50844e98be chore: version++ 2022-05-31 21:52:12 +02:00
Andras Bacsai
5c6fcfebf9 Merge pull request #439 from coollabsio/next
v2.9.2
2022-05-31 20:54:55 +02:00
Andras Bacsai
84cfe6fb42 fix: Add GIT ENV variable for submodules 2022-05-31 20:50:47 +02:00
Andras Bacsai
abf0aeb2a8 fix: Force restart proxy on seeding 2022-05-31 20:50:28 +02:00
Andras Bacsai
a7aca0ce8b fix: Only restart coolify proxy in case of version prior to 2.9.2 2022-05-31 20:25:39 +02:00
Andras Bacsai
67bb5d973b fix: force restart proxy 2022-05-31 19:39:25 +02:00
Andras Bacsai
662948d622 fix: TrustProxy 2022-05-31 19:35:07 +02:00
Andras Bacsai
f5bedfdf7f chore: Version++ 2022-05-31 19:34:55 +02:00
Andras Bacsai
db9db61d92 Merge pull request #436 from coollabsio/next
v2.9.1
2022-05-31 14:00:17 +02:00
Andras Bacsai
d255cb1973 chore: version++ 2022-05-31 13:57:55 +02:00
Andras Bacsai
6529271de2 fix: GitHub fixes 2022-05-31 13:57:42 +02:00
Andras Bacsai
0dd32b5319 Merge pull request #435 from coollabsio/next
v2.9.0 - fixes
2022-05-31 12:47:11 +02:00
Andras Bacsai
b032da798b fix: ftp connection 2022-05-31 12:02:09 +02:00
Andras Bacsai
a1a9f1531e fix: Host key verification 2022-05-31 12:01:55 +02:00
Andras Bacsai
f71b54deb2 fix: Otherfqdns 2022-05-31 11:35:04 +02:00
Andras Bacsai
c63430e342 Merge pull request #426 from christopherklint97/fix/lint-errors-services
fix: remove lint errors in database services
2022-05-31 11:25:10 +02:00
Andras Bacsai
6821b128ad Merge pull request #427 from vasani-arpit/main
adding service request link in template chooser
2022-05-31 11:23:52 +02:00
Andras Bacsai
3f8d44a01c Merge branch 'main' into main 2022-05-31 11:23:34 +02:00
Andras Bacsai
3aef04437c Merge pull request #433 from vasani-arpit/next
Support for Github codespaces and Service request issue template
2022-05-31 11:22:01 +02:00
Andras Bacsai
53e32c038b Merge pull request #434 from coollabsio/next
v2.9.0
2022-05-31 11:20:16 +02:00
Andras Bacsai
1660510614 Merge branch 'main' into next 2022-05-28 21:08:21 +02:00
Arpit Vasani
69f5601b3e Updating docs and removing redundant information 2022-05-28 16:50:45 +00:00
Arpit Vasani
6e22fecc98 adding more commands 2022-05-28 15:19:49 +00:00
Arpit Vasani
d18b2b6a1f Update devcontainer.json 2022-05-28 20:33:47 +05:30
Arpit Vasani
4b0370ac08 Let's see if this works 2022-05-28 14:58:52 +00:00
Arpit Vasani
750ef80777 adding support for github codespaces. 2022-05-28 13:08:52 +00:00
Arpit Vasani
59c62923be gradually redirecting users to fider 2022-05-28 13:00:08 +00:00
Arpit Vasani
68b220d06e Merge remote-tracking branch 'upstream/next' into next 2022-05-28 12:59:30 +00:00
Arpit Vasani
250ea64203 pulling latest changes. 2022-05-28 12:55:55 +00:00
Arpit Vasani
0ab57396d2 Removing redundant information, adding support for github codespaces. 2022-05-28 12:49:56 +00:00
Arpit Vasani
1e36856e65 gradually redirecting users to fider 2022-05-23 14:30:28 +05:30
Andras Bacsai
cfdc8db543 Deleted a file, oops 2022-05-19 16:47:45 +02:00
Andras Bacsai
1f25bc411f feat: database + service usage 2022-05-19 16:43:17 +02:00
Andras Bacsai
972f77c790 ui: css 2022-05-19 16:19:20 +02:00
Andras Bacsai
795f99bb47 fix: new source canceled view 2022-05-19 15:18:19 +02:00
Andras Bacsai
54f7142b2b disable appwrite for now 2022-05-19 15:05:56 +02:00
Andras Bacsai
26eacfc2c0 fix: Instant save on demo instance 2022-05-19 14:38:38 +02:00
Andras Bacsai
e2bf02841f fix: Demo instance save domain instantly 2022-05-19 14:36:49 +02:00
Andras Bacsai
6a59b8d27c fix: do not fetch app state in case of missconfiguration 2022-05-19 14:33:02 +02:00
Andras Bacsai
7fc43ef2bb fix: Remove gh token on git source changes 2022-05-19 14:32:47 +02:00
Andras Bacsai
70a3fc247e remove notifications for now 2022-05-19 14:32:31 +02:00
Andras Bacsai
56ab8312f1 remove notifications for now 2022-05-19 14:32:10 +02:00
Andras Bacsai
6fb6a514ac fix: Minio urls + domain checks 2022-05-19 13:45:17 +02:00
Andras Bacsai
b01f5f47b3 fix: PR deployments view 2022-05-19 13:44:45 +02:00
Andras Bacsai
ebdd3601b3 remove console.logs 2022-05-19 10:21:18 +02:00
Andras Bacsai
c0d711170b fix: Proxy for http 2022-05-19 10:20:43 +02:00
scshiv29-dev
da86f0076b Merge branch 'main' into main 2022-05-19 09:54:10 +05:30
scshiv29-dev
9d8551a9be minor fix 2022-05-19 04:23:23 +00:00
Christopher Klint
01ea86479d fix: lint errors in database services 2022-05-18 21:09:01 +02:00
Andras Bacsai
eb62888c39 fix: WIP Traefik 2022-05-18 16:54:04 +02:00
Andras Bacsai
b006fe8f68 fix: remove debug things 2022-05-18 12:33:24 +02:00
Andras Bacsai
dc3add495c wip(fix): traefik 2022-05-18 12:32:53 +02:00
Andras Bacsai
59086e9eb4 fixes 2022-05-17 15:54:11 +02:00
Andras Bacsai
e563988596 fix: Traefik 2022-05-17 14:06:07 +02:00
Andras Bacsai
5a206a140c fix: remove console.log 2022-05-17 13:25:42 +02:00
Andras Bacsai
dbf910ff38 feat: PageLoader 2022-05-17 11:16:58 +02:00
Andras Bacsai
35b31dce2b WIP: Notifications and application usage 2022-05-17 10:14:06 +02:00
Andras Bacsai
1ec620be4b WIP: Traefik 2022-05-16 23:56:54 +02:00
Andras Bacsai
8516ac671a WIP: Traefik 2022-05-16 23:20:50 +02:00
Andras Bacsai
3b7fdebe8c Merge pull request #425 from coollabsio/quickfix
v2.8.2
2022-05-16 16:19:58 +02:00
Andras Bacsai
17ac3048ac chore: Version++ 2022-05-16 16:17:41 +02:00
Andras Bacsai
4e43efef50 fix: Gastby buildpack 2022-05-16 16:17:32 +02:00
Andras Bacsai
4f4f5b1c01 WIP: Traefik 2022-05-16 16:11:35 +02:00
Andras Bacsai
1fa5c5e021 WIP: Traefik migration 2022-05-14 15:23:41 +02:00
Andras Bacsai
436e0e3a2b WIP: Traefik 2022-05-13 18:09:12 +02:00
Andras Bacsai
e717c1d599 add migration 2022-05-13 17:49:38 +02:00
Andras Bacsai
ae5d90eb47 WIP: Traefik 2022-05-12 16:53:22 +02:00
Andras Bacsai
c095cb58b3 migration 2022-05-12 13:02:40 +02:00
Andras Bacsai
6bba37c36d WIP: Traefik?! 2022-05-12 13:02:14 +02:00
Andras Bacsai
60a428a952 Remove WS for now 2022-05-11 23:21:45 +02:00
scshiv29-dev
c376123877 added uvicorn 2022-05-11 22:54:13 +05:30
scshiv29-dev
cd3663038f added uvicorn 2022-05-11 22:45:50 +05:30
Andras Bacsai
16b7c1708b WIP: Testing WS 2022-05-11 16:15:34 +02:00
Andras Bacsai
3435f92fcb WIP: Appwrite 2022-05-11 12:02:09 +02:00
Andras Bacsai
cef571b8cc chore: version++ 2022-05-11 11:03:01 +02:00
Andras Bacsai
242bc61e2d Readme update 2022-05-11 11:02:28 +02:00
Andras Bacsai
c917135bd3 fix: Service checks 2022-05-11 11:02:21 +02:00
Andras Bacsai
3802158ad5 Merge pull request #419 from coollabsio/next
v2.8.1
2022-05-10 18:25:38 +02:00
Andras Bacsai
e452f68614 fix: UI 2022-05-10 18:25:23 +02:00
Andras Bacsai
9586213dd1 fix: WP custom db 2022-05-10 18:21:05 +02:00
Andras Bacsai
30781f218c Merge branch 'main' into next 2022-05-10 14:59:57 +02:00
Andras Bacsai
697c42ff66 update readme 2022-05-10 14:58:49 +02:00
Andras Bacsai
37d8f1847c fix: Default Python package 2022-05-10 14:26:25 +02:00
Andras Bacsai
2af13fff55 chore: Version++ 2022-05-10 14:16:04 +02:00
Andras Bacsai
51e8ca8de0 fix: UI 2022-05-10 13:44:36 +02:00
Andras Bacsai
06228cd2a7 fix: UI 2022-05-10 13:40:30 +02:00
Andras Bacsai
0033baafdc fix: UI 2022-05-10 13:36:39 +02:00
Andras Bacsai
79dfc6a660 Merge pull request #416 from coollabsio/next
v2.8.0
2022-05-10 11:50:07 +02:00
Andras Bacsai
972b0fa811 fix: Remove RC python 2022-05-10 11:14:25 +02:00
Andras Bacsai
ad51a9ebc8 feat: Python image selection 2022-05-10 11:10:58 +02:00
Andras Bacsai
51a40d049d fix: UI 2022-05-10 10:54:47 +02:00
Andras Bacsai
8b3113bd92 fix: UI 2022-05-10 10:34:50 +02:00
Andras Bacsai
d6b6938555 fix: Navbar UI 2022-05-10 10:14:48 +02:00
Andras Bacsai
ce52608f19 feat: WP could have custom db 2022-05-10 10:12:13 +02:00
Andras Bacsai
ede37d296b fix: UI 2022-05-09 23:52:49 +02:00
Andras Bacsai
6374b1284b fix Mongodb icon 2022-05-09 23:51:40 +02:00
Andras Bacsai
6ac8dd8907 fix: Expose ports for services 2022-05-09 15:18:25 +02:00
Andras Bacsai
24c655d7ef feat: Custom script path for Plausible 2022-05-09 15:05:24 +02:00
Andras Bacsai
1f087cc29a fix: Server usage only shown for root team 2022-05-09 14:11:34 +02:00
Andras Bacsai
c3684a1650 chore: version++ 2022-05-09 14:08:13 +02:00
Andras Bacsai
a410fd0776 feat: usage on dashboard 2022-05-09 14:03:07 +02:00
Andras Bacsai
271fb1358d feat: show usage trends 2022-05-09 13:01:00 +02:00
Andras Bacsai
a4d53a28eb feat: Basic server usage on dashboard 2022-05-09 12:45:17 +02:00
Andras Bacsai
e69e32f6c7 fix: default packagemanager 2022-05-09 09:12:21 +02:00
Andras Bacsai
650409dde3 chore: version++ 2022-05-07 10:41:52 +02:00
Andras Bacsai
f3f4bb5105 fix: No image for Docker buildpack 2022-05-07 10:41:31 +02:00
Andras Bacsai
9c02af6b52 Update github-actions.yml 2022-05-06 15:41:42 +02:00
Andras Bacsai
6a3f4ba171 Merge pull request #413 from coollabsio/next
v2.7.0 again
2022-05-06 15:40:30 +02:00
Andras Bacsai
6a6426fe6b fix: Sentry 2022-05-06 15:40:07 +02:00
Andras Bacsai
21256746c3 Update github-actions.yml 2022-05-06 15:23:18 +02:00
Andras Bacsai
c34d643f95 Update github-actions.yml 2022-05-06 15:22:22 +02:00
Andras Bacsai
0be402af82 Update github-actions.yml 2022-05-06 15:18:38 +02:00
Andras Bacsai
b5b0b6524d Update github-actions.yml 2022-05-06 15:10:28 +02:00
Andras Bacsai
22f1a3c908 Merge pull request #412 from coollabsio/next
v2.7.0
2022-05-06 14:55:45 +02:00
Andras Bacsai
fa5f439858 fix: Cancel 2022-05-06 14:45:50 +02:00
Andras Bacsai
7cc760eecf fix: Disable sentry for now 2022-05-06 14:43:28 +02:00
Andras Bacsai
af0652f6b2 fix: DNS check 2022-05-06 14:07:43 +02:00
Andras Bacsai
9e009bebaa fix 2022-05-06 11:53:53 +02:00
Andras Bacsai
8e53ae3484 fix: Check DNS in prod only 2022-05-06 11:42:24 +02:00
Andras Bacsai
7ceb8f1537 fix: Better DNS check to prevent errors 2022-05-06 11:41:39 +02:00
Andras Bacsai
b0eae8cfe9 fix: Cancel old builds in database 2022-05-06 09:55:39 +02:00
Andras Bacsai
febef372b8 fix: Cancel jobs 2022-05-06 08:42:06 +02:00
Andras Bacsai
a18e3659aa TODO for myself 2022-05-05 15:28:32 +02:00
Andras Bacsai
e2e342851a fix: Remove debug info 2022-05-05 15:25:22 +02:00
Andras Bacsai
bee3292088 fixes 2022-05-05 15:23:34 +02:00
Andras Bacsai
f56d4dbbb3 fix: Check domain for coolify before saving 2022-05-05 15:18:13 +02:00
Andras Bacsai
eccd7c96d7 fix: Do not run SSL renew in development 2022-05-05 13:31:36 +02:00
Andras Bacsai
4046c472ed chore: version++ 2022-05-05 13:29:46 +02:00
Andras Bacsai
0da4a1024a Add feedback link 2022-05-05 13:22:42 +02:00
Andras Bacsai
aa2f328640 fix: logos for dbs 2022-05-05 13:20:59 +02:00
Andras Bacsai
4d22b610b6 Merge pull request #398 from vasani-arpit/main
Structured issue making using new github issues forms.
2022-05-05 13:10:10 +02:00
Andras Bacsai
e91c3eab9c Merge pull request #396 from Cyril-Beeckman/main
[WIP] Added MariaDB database
2022-05-05 13:06:58 +02:00
Andras Bacsai
2e8fd6f0c7 fix: exposedPorts 2022-05-04 15:45:44 +02:00
Andras Bacsai
90fde24b40 Merge pull request #318 from CharcoalStyles/exposePort
Added expose port for applications
2022-05-04 15:45:16 +02:00
Andras Bacsai
02a1f50776 Update README.md 2022-05-04 14:40:00 +02:00
Andras Bacsai
57b97a9204 Merge pull request #411 from coollabsio/gh-actions
GitHub Actions for release
2022-05-04 14:37:50 +02:00
Andras Bacsai
1ec03693d3 Update github-actions.yml 2022-05-04 14:31:03 +02:00
Andras Bacsai
4246d86694 Update github-actions.yml 2022-05-04 14:30:35 +02:00
Andras Bacsai
2cce1f8459 Update github-actions.yml 2022-05-04 14:11:46 +02:00
Andras Bacsai
3937cfec53 Update github-actions.yml 2022-05-04 14:08:24 +02:00
Andras Bacsai
259aeeb67a Update github-actions.yml 2022-05-04 14:06:06 +02:00
Andras Bacsai
9d53bc0926 Update github-actions.yml 2022-05-04 14:03:00 +02:00
Andras Bacsai
1211f3c9fd Update github-actions.yml 2022-05-04 13:59:59 +02:00
Andras Bacsai
c07d6aa702 Update github-actions.yml 2022-05-04 13:44:48 +02:00
Andras Bacsai
4f662dbf21 Update github-actions.yml 2022-05-04 13:36:46 +02:00
Andras Bacsai
a4301c5d23 Update github-actions.yml 2022-05-04 13:32:35 +02:00
Andras Bacsai
86b7824c78 Update github-actions.yml 2022-05-04 13:22:54 +02:00
Andras Bacsai
435f063c36 Update and rename github-actions-demo.yml to github-actions.yml 2022-05-04 13:20:46 +02:00
Andras Bacsai
902a764ff2 Update github-actions-demo.yml 2022-05-04 13:06:15 +02:00
Andras Bacsai
4097378847 Update github-actions-demo.yml 2022-05-04 13:05:01 +02:00
Andras Bacsai
5f3567e808 Create github-actions-demo.yml 2022-05-04 13:02:16 +02:00
Andras Bacsai
7325353ced Merge pull request #406 from coollabsio/next
v2.6.3
2022-05-03 22:50:53 +02:00
Andras Bacsai
68f5b32876 fix: missing node versions 2022-05-03 22:49:52 +02:00
Andras Bacsai
8d4eaad920 Merge pull request #402 from coollabsio/next
v2.6.2
2022-05-03 12:11:39 +02:00
Andras Bacsai
4b38865cc9 fix: Webhook build images 2022-05-03 12:07:37 +02:00
Aaron Styles
326f0dac1b Merge github.com:coollabsio/coolify into exposePort 2022-05-03 16:14:58 +10:00
Arpit Vasani
828faaf2b1 Update --bug-report.yaml 2022-05-03 11:38:49 +05:30
Arpit Vasani
9582664406 Create --task.yaml 2022-05-03 11:37:32 +05:30
Arpit Vasani
ec5474b72b Create --feature-request.yaml 2022-05-03 11:36:13 +05:30
Arpit Vasani
62d1011d9f Create --bug-report.yaml 2022-05-03 11:33:19 +05:30
Arpit Vasani
0a7ec6bd20 Create config.yml 2022-05-03 11:29:10 +05:30
Cyril Beeckman
b84c37cd8f Update README and remove duplicate for NextJS 2022-05-02 20:27:39 +02:00
Cyril Beeckman
887d65e512 Change MariaDB logo 2022-05-02 17:06:25 +02:00
Cyril Beeckman
3543a9c809 Added MariaDB database 2022-05-02 16:25:24 +02:00
Aaron Styles
f94e17134e Added expose port for Services 2022-04-30 22:47:00 +10:00
Aaron Styles
40cbee0d75 Removed some checking that doesn't work properly. Added a switch for exposing a port. 2022-04-30 21:34:00 +10:00
Aaron Styles
c98ed5338a Merged upstream and fixed expose port implementation 2022-04-28 21:49:13 +10:00
Aaron Charcoal Styles
27f1e1d7cd Merge branch 'main' into exposePort 2022-04-11 09:49:07 +00:00
Aaron Charcoal Styles
8f3f9ebade Merge branch 'main' into exposePort 2022-04-08 18:48:16 +00:00
Aaron Styles
1bd33fea98 Added expose port for applications 2022-04-08 17:12:01 +10:00
527 changed files with 22061 additions and 20819 deletions

.devcontainer/Dockerfile Normal file

@@ -0,0 +1,16 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node/.devcontainer/base.Dockerfile
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
ARG VARIANT="16-bullseye"
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
# [Optional] Uncomment if you want to install an additional version of node using nvm
# ARG EXTRA_NODE_VERSION=10
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
# [Optional] Uncomment if you want to install more global node modules
RUN su node -c "npm install -g pnpm"


@@ -0,0 +1,33 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node
{
"name": "Node.js",
"build": {
"dockerfile": "Dockerfile",
// Update 'VARIANT' to pick a Node version: 18, 16, 14.
// Append -bullseye or -buster to pin to an OS version.
// Use -bullseye variants on local arm64/Apple Silicon.
"args": {
"VARIANT": "16-bullseye"
}
},
// Set *default* container specific settings.json values on container create.
"settings": {},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"dbaeumer.vscode-eslint",
"svelte.svelte-vscode",
"ardenivanov.svelte-intellisense",
"Prisma.prisma"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [3000],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "cp .env.template .env && pnpm install && pnpm db:push && pnpm db:seed",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node",
"features": {
"docker-in-docker": "20.10",
"github-cli": "latest"
}
}


@@ -1,16 +1,12 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
/yarn.lock
/.pnpm-store
/ssl
build
.svelte-kit
package
.env
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
.env.*
!.env.example
dist
client
apps/api/db/*.db
local-serve


@@ -1,8 +0,0 @@
COOLIFY_APP_ID=
COOLIFY_SECRET_KEY=12341234123412341234123412341234
COOLIFY_DATABASE_URL=file:../db/dev.db
COOLIFY_SENTRY_DSN=
COOLIFY_IS_ON=docker
COOLIFY_WHITE_LABELED=false
COOLIFY_WHITE_LABELED_ICON=
COOLIFY_AUTO_UPDATE=false


@@ -0,0 +1,47 @@
name: 🐞 Bug report
description: Create a bug report to help us improve coolify
title: "[Bug]: "
labels: [Bug]
assignees:
- andrasbacsai
- vasani-arpit
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report! Please fill the form in English
- type: checkboxes
attributes:
label: Is there an existing issue for this?
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Description
description: A concise description of what you're experiencing and what you expect.
placeholder: |
When I do <X>, <Y> happens and I see the error message attached below:
```...```
What I expect is <Z>
validations:
required: true
- type: textarea
attributes:
label: Steps To Reproduce
description: Add steps to reproduce this behaviour, include console / network logs & videos
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
validations:
required: true
- type: input
id: version
attributes:
label: Version
description: "The version of your coolify Instance"
placeholder: "2.5.2"
validations:
required: true


@@ -0,0 +1,31 @@
name: 🛠️ Feature request
description: Suggest an idea to improve coolify
title: '[Feature]: '
labels: [Enhancement]
assignees:
- andrasbacsai
- vasani-arpit
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to request a feature for coolify! Please also add your request here to get feedback from the community: https://feedback.coolify.io/!
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue related to this feature request already exists.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: Summary
description: One paragraph description of the feature.
validations:
required: true
- type: textarea
attributes:
label: Why should this be worked on?
description: A concise description of the problems or use cases for this feature request.
validations:
required: true

.github/ISSUE_TEMPLATE/--task.yaml vendored Normal file

@@ -0,0 +1,20 @@
name: 📝 Task
description: Create a task for the team to work on
title: "[Task]: "
labels: [Task]
body:
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue related to this already exists.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: SubTasks
placeholder: |
- Sub Task 1
- Sub Task 2
validations:
required: false

.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,8 @@
blank_issues_enabled: true
contact_links:
- name: 🤔 Questions and Help
url: https://discord.com/invite/6rDM4fkymF
about: Reach out to us on discord or our github discussions page.
- name: 🙋‍♂️ service request
url: https://feedback.coolify.io/
about: want to request a new service? for e.g wordpress, hasura, appwrite etc...


@@ -0,0 +1,37 @@
name: production-release
on:
release:
types: [published]
jobs:
making-something-cool:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
cache-from: type=registry,ref=coollabsio/coolify:buildcache
cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_CHANNEL }}

.github/workflows/staging-release.yml vendored Normal file

@@ -0,0 +1,35 @@
name: staging-release
on:
push:
branches:
- next
jobs:
staging-release:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:next
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next,mode=max
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_CHANNEL }}

.gitignore vendored

@@ -1,16 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
/yarn.lock
/.pnpm-store
/ssl
build
.svelte-kit
package
.env
.env.prod
.env.stag
/db/*.db
/db/*.db-journal
/data/haproxy/haproxy.cfg
/data/haproxy/haproxy.cfg.lkg
.env.*
!.env.example
dist
client
apps/api/db/*.db
local-serve
apps/api/db/migration.db-journal

.gitpod.yml Normal file

@@ -0,0 +1,11 @@
# This configuration file was automatically generated by Gitpod.
# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file)
# and commit this file to your remote git repository to share the goodness with others.
image: gitpod/workspace-node:2022-06-20-19-54-55
tasks:
- init: pnpm install && pnpm db:push && pnpm db:seed
command: pnpm dev
ports:
- port: 3001
visibility: public

.husky/_/.gitignore vendored

@@ -1 +0,0 @@
*


@@ -1,4 +0,0 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
pnpm lint-staged


@@ -1,5 +0,0 @@
{
"**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}": [
"prettier --ignore-path .gitignore --write --plugin-search-dir=."
]
}


@@ -2,6 +2,8 @@
First of all, thank you for considering contributing to my project! It means a lot 💜.
Contribution guide is for v2, not applicable for v3
## 🙋 Want to help?
If you begin in GitHub contribution, you can find the [first contribution](https://github.com/firstcontributions/first-contributions) and follow this guide.
@@ -15,7 +17,13 @@ This is a little list of what you can do to help the project:
## 👋 Introduction
🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS.
### Setup with github codespaces
If you have github codespaces enabled then you can just create a codespace and run `pnpm dev` to run your the dev environment. All the required dependencies and packages has been configured for you already.
### Setup locally in your machine
> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. 💡 Although windows users can use github codespaces for development
#### Recommended Pull Request Guideline
@@ -35,20 +43,16 @@ Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I re
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
#### Setup a local development environment
#### Steps for local setup
- Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- Install dependencies with `pnpm install`.
- Need to create a local SQlite database with `pnpm db:push`.
- This will apply all migrations at `db/dev.db`.
- Seed the database with base entities with `pnpm db:seed`
- You can start coding after starting `pnpm dev`.
1. Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
2. Install dependencies with `pnpm install`.
3. Need to create a local SQlite database with `pnpm db:push`.
#### How to start after you set up your local fork?
This will apply all migrations at `db/dev.db`.
This repository works better with [pnpm](https://pnpm.io) due to the lock file. I recommend you to give it a try and use `pnpm` as well because it is cool and efficient!
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
4. Seed the database with base entities with `pnpm db:seed`
5. You can start coding after starting `pnpm dev`.
## 🧑‍💻 Developer contribution
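
Pulled together, the contribution-guide changes above describe the local development flow as one sequence. The following is a sketch assembled from those numbered steps (the pnpm script names are the ones quoted in the diff, defined by the repository's package.json):

cp .env.template .env   # then set COOLIFY_APP_ID to something cool
pnpm install            # install dependencies
pnpm db:push            # create the local SQLite database and apply migrations at db/dev.db
pnpm db:seed            # seed the database with base entities
pnpm dev                # start the dev environment and begin coding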


@@ -1,17 +1,18 @@
FROM node:16.14.2-alpine as install
FROM node:18-alpine as build
WORKDIR /app
RUN apk add --no-cache curl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
COPY package*.json .
COPY . .
RUN pnpm install
RUN pnpm build
FROM node:16.14.2-alpine
ARG TARGETPLATFORM
# Production build
FROM node:18-alpine
WORKDIR /app
ENV NODE_ENV production
ARG TARGETPLATFORM
ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
@@ -19,24 +20,24 @@ ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
PRISMA_CLIENT_ENGINE_TYPE=binary
COPY --from=coollabsio/prisma-engine:latest /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/
COPY --from=install /app/node_modules ./node_modules
COPY . .
COPY --from=coollabsio/prisma-engine:3.15 /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
RUN pnpm add -g pnpm
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
RUN mkdir -p ~/.docker/cli-plugins/
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
RUN pnpm prisma generate
RUN pnpm build
COPY --from=build /app/apps/api/build/ .
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .
COPY --from=build /app/docker-compose.yaml .
RUN pnpm install -p
EXPOSE 3000
CMD ["pnpm", "start"]
CMD pnpm start


@@ -6,7 +6,11 @@ An open-source & self-hostable Heroku / Netlify alternative.
https://demo.coolify.io/
(If it is unresponsive, that means someone overloaded the server. 🙃)
(If it is unresponsive, that means someone overloaded the server. 😄)
## Feedback
If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!
## How to install
@@ -26,6 +30,8 @@ For more details goto the [docs](https://docs.coollabs.io/coolify/installation).
## Features
ARM support is in beta!
### Git Sources
You can use the following Git Sources to be auto-deployed to your Coolifyt instance! (Self-hosted versions are also supported.)
@@ -52,19 +58,21 @@ These are the predefined build packs, but with the Docker build pack, you can ho
- NuxtJS
- NextJS
- React/Preact
- NextJS
- Gatsby
- Svelte
- PHP
- Laravel
- Rust
- Docker
- Python
- Deno
### Databases
One-click database is ready to be used internally or shared over the internet:
- MongoDB
- MariaDB
- MySQL
- PostgreSQL
- CouchDB
@@ -74,9 +82,9 @@ One-click database is ready to be used internally or shared over the internet:
You can host cool open-source services as well:
- [WordPress](https://wordpress.org)
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
- [Ghost](https://ghost.org)
- [Plausible Analytics](https://plausible.io)
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
- [NocoDB](https://nocodb.com)
- [VSCode Server](https://github.com/cdr/code-server)
- [MinIO](https://min.io)
@@ -91,7 +99,7 @@ You can host cool open-source services as well:
## Migration from v1
A fresh installation is necessary. v2 is not compatible with v1.
A fresh installation is necessary. v2 and v3 are not compatible with v1.
## Support

apps/api/.eslintignore Normal file

@@ -0,0 +1,9 @@
seed.js
.DS_Store
node_modules
build
.env
.env.*
!.env.example
dist
dev.db

apps/api/.eslintrc Normal file

@@ -0,0 +1,11 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint", "prettier"],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"prettier"
]
}

apps/api/.gitignore vendored Normal file

@@ -0,0 +1,11 @@
.DS_Store
node_modules
build
.svelte-kit
package
.env
.env.*
!.env.example
dist
dev.db
client

apps/api/db/.gitkeep Normal file

apps/api/nodemon.json Normal file

@@ -0,0 +1,7 @@
{
"watch": ["src"],
"ignore": ["src/**/*.test.ts"],
"ext": "ts,mjs,json,graphql",
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build",
"legacyWatch": true
}

apps/api/package.json Normal file

@@ -0,0 +1,68 @@
{
"name": "coolify-api",
"description": "Coolify's Fastify API",
"license": "AGPL-3.0",
"scripts": {
"db:push": "prisma db push && prisma generate",
"db:seed": "prisma db seed",
"db:studio": "prisma studio",
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
"dev": "nodemon",
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
"format": "prettier --write 'src/**/*.{js,ts,json,md}'",
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
"start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js"
},
"dependencies": {
"@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.1.0",
"@fastify/cookie": "7.1.0",
"@fastify/cors": "8.0.0",
"@fastify/env": "4.0.0",
"@fastify/jwt": "6.3.1",
"@fastify/static": "6.4.0",
"@iarna/toml": "2.2.5",
"@prisma/client": "3.15.2",
"axios": "0.27.2",
"bcryptjs": "2.4.3",
"bree": "9.1.1",
"cabin": "9.1.2",
"compare-versions": "4.1.3",
"cuid": "2.1.8",
"dayjs": "1.11.3",
"dockerode": "3.3.2",
"dotenv-extended": "2.9.0",
"fastify": "4.2.1",
"fastify-plugin": "4.0.0",
"generate-password": "1.7.0",
"get-port": "6.1.2",
"got": "12.1.0",
"is-ip": "4.0.0",
"js-yaml": "4.1.0",
"jsonwebtoken": "8.5.1",
"node-forge": "1.3.1",
"node-os-utils": "1.3.7",
"p-queue": "7.2.0",
"strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1"
},
"devDependencies": {
"@types/node": "18.0.4",
"@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.30.6",
"@typescript-eslint/parser": "5.30.6",
"esbuild": "0.14.49",
"eslint": "8.19.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-prettier": "4.2.1",
"nodemon": "2.0.19",
"prettier": "2.7.1",
"prisma": "3.15.2",
"rimraf": "3.0.2",
"tsconfig-paths": "4.0.0",
"typescript": "4.7.4"
},
"prisma": {
"seed": "node prisma/seed.js"
}
}


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "exposePort" INTEGER;


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Service" ADD COLUMN "exposePort" INTEGER;


@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PlausibleAnalytics" (
"id" TEXT NOT NULL PRIMARY KEY,
"email" TEXT,
"username" TEXT,
"password" TEXT NOT NULL,
"postgresqlUser" TEXT NOT NULL,
"postgresqlPassword" TEXT NOT NULL,
"postgresqlDatabase" TEXT NOT NULL,
"postgresqlPublicPort" INTEGER,
"secretKeyBase" TEXT,
"scriptName" TEXT NOT NULL DEFAULT 'plausible.js',
"serviceId" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_PlausibleAnalytics" ("createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username") SELECT "createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username" FROM "PlausibleAnalytics";
DROP TABLE "PlausibleAnalytics";
ALTER TABLE "new_PlausibleAnalytics" RENAME TO "PlausibleAnalytics";
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;


@@ -0,0 +1,32 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Wordpress" (
"id" TEXT NOT NULL PRIMARY KEY,
"extraConfig" TEXT,
"tablePrefix" TEXT,
"ownMysql" BOOLEAN NOT NULL DEFAULT false,
"mysqlHost" TEXT,
"mysqlPort" INTEGER,
"mysqlUser" TEXT NOT NULL,
"mysqlPassword" TEXT NOT NULL,
"mysqlRootUser" TEXT NOT NULL,
"mysqlRootUserPassword" TEXT NOT NULL,
"mysqlDatabase" TEXT,
"mysqlPublicPort" INTEGER,
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
"ftpUser" TEXT,
"ftpPassword" TEXT,
"ftpPublicPort" INTEGER,
"ftpHostKey" TEXT,
"ftpHostKeyPrivate" TEXT,
"serviceId" TEXT NOT NULL,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
DROP TABLE "Wordpress";
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;


@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Setting" (
"id" TEXT NOT NULL PRIMARY KEY,
"fqdn" TEXT,
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"minPort" INTEGER NOT NULL DEFAULT 9000,
"maxPort" INTEGER NOT NULL DEFAULT 9100,
"proxyPassword" TEXT NOT NULL,
"proxyUser" TEXT NOT NULL,
"proxyHash" TEXT,
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL
);
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
DROP TABLE "Setting";
ALTER TABLE "new_Setting" RENAME TO "Setting";
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Minio" ADD COLUMN "apiFqdn" TEXT;


@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "Application" ADD COLUMN "deploymentType" TEXT;


@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"type" TEXT,
"apiUrl" TEXT,
"htmlUrl" TEXT,
"customPort" INTEGER NOT NULL DEFAULT 22,
"organization" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;


@@ -20,6 +20,7 @@ model Setting {
proxyHash String?
isAutoUpdateEnabled Boolean @default(false)
isDNSCheckEnabled Boolean @default(true)
isTraefikUsed Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
@@ -84,11 +85,13 @@ model Application {
buildPack String?
projectId Int?
port Int?
exposePort Int?
installCommand String?
buildCommand String?
startCommand String?
baseDirectory String?
publishDirectory String?
deploymentType String?
phpModules String?
pythonWSGI String?
pythonModule String?
@@ -215,6 +218,7 @@ model GitSource {
type String?
apiUrl String?
htmlUrl String?
customPort Int @default(22)
organization String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -289,6 +293,7 @@ model Service {
id String @id @default(cuid())
name String
fqdn String?
exposePort Int?
dualCerts Boolean @default(false)
type String?
version String?
@@ -308,6 +313,7 @@ model Service {
umami Umami?
hasura Hasura?
fider Fider?
moodle Moodle?
}
model PlausibleAnalytics {
@@ -320,6 +326,7 @@ model PlausibleAnalytics {
postgresqlDatabase String
postgresqlPublicPort Int?
secretKeyBase String?
scriptName String @default("plausible.js")
serviceId String @unique
service Service @relation(fields: [serviceId], references: [id])
createdAt DateTime @default(now())
@@ -331,6 +338,7 @@ model Minio {
rootUser String
rootUserPassword String
publicPort Int?
apiFqdn String?
serviceId String @unique
service Service @relation(fields: [serviceId], references: [id])
createdAt DateTime @default(now())
@@ -350,6 +358,9 @@ model Wordpress {
id String @id @default(cuid())
extraConfig String?
tablePrefix String?
ownMysql Boolean @default(false)
mysqlHost String?
mysqlPort Int?
mysqlUser String
mysqlPassword String
mysqlRootUser String
@@ -441,3 +452,20 @@ model Fider {
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
model Moodle {
id String @id @default(cuid())
serviceId String @unique
defaultUsername String
defaultPassword String
defaultEmail String
mariadbUser String
mariadbPassword String
mariadbRootUser String
mariadbRootUserPassword String
mariadbDatabase String
mariadbPublicPort Int?
service Service @relation(fields: [serviceId], references: [id])
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}


@@ -1,10 +1,10 @@
const dotEnvExtended = require('dotenv-extended');
dotEnvExtended.load();
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();
const crypto = require('crypto');
const generator = require('generate-password');
const cuid = require('cuid');
const { PrismaClient } = require('@prisma/client');
const prisma = new PrismaClient();
function generatePassword(length = 24) {
return generator.generate({
@@ -33,6 +33,7 @@ async function main() {
id: settingsFound.id
},
data: {
isTraefikUsed: true,
proxyHash: null
}
});
@@ -84,20 +85,4 @@ const encrypt = (text) => {
content: encrypted.toString('hex')
});
}
};
const decrypt = (hashString) => {
if (hashString) {
const hash = JSON.parse(hashString);
const decipher = crypto.createDecipheriv(
algorithm,
process.env['COOLIFY_SECRET_KEY'],
Buffer.from(hash.iv, 'hex')
);
const decrpyted = Buffer.concat([
decipher.update(Buffer.from(hash.content, 'hex')),
decipher.final()
]);
return decrpyted.toString();
}
};
};

apps/api/src/index.ts Normal file

@@ -0,0 +1,141 @@
import Fastify from 'fastify';
import cors from '@fastify/cors';
import serve from '@fastify/static';
import env from '@fastify/env';
import cookie from '@fastify/cookie';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import { asyncExecShell, isDev, prisma } from './lib/common';
import { scheduler } from './lib/scheduler';
declare module 'fastify' {
interface FastifyInstance {
config: {
COOLIFY_APP_ID: string,
COOLIFY_SECRET_KEY: string,
COOLIFY_DATABASE_URL: string,
COOLIFY_SENTRY_DSN: string,
COOLIFY_IS_ON: string,
COOLIFY_WHITE_LABELED: boolean,
COOLIFY_WHITE_LABELED_ICON: string | null,
COOLIFY_AUTO_UPDATE: boolean,
};
}
}
const port = isDev ? 3001 : 3000;
const host = '0.0.0.0';
const fastify = Fastify({
logger: false,
trustProxy: true
});
const schema = {
type: 'object',
required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
properties: {
COOLIFY_APP_ID: {
type: 'string',
},
COOLIFY_SECRET_KEY: {
type: 'string',
},
COOLIFY_DATABASE_URL: {
type: 'string',
default: 'file:../db/dev.db'
},
COOLIFY_SENTRY_DSN: {
type: 'string',
default: null
},
COOLIFY_IS_ON: {
type: 'string',
default: 'docker'
},
COOLIFY_WHITE_LABELED: {
type: 'boolean',
default: false
},
COOLIFY_WHITE_LABELED_ICON: {
type: 'string',
default: null
},
COOLIFY_AUTO_UPDATE: {
type: 'boolean',
default: false
},
}
};
const options = {
schema,
dotenv: true
};
fastify.register(env, options);
if (!isDev) {
fastify.register(serve, {
root: path.join(__dirname, './public'),
preCompressed: true
});
fastify.setNotFoundHandler(async function (request, reply) {
if (request.raw.url && request.raw.url.startsWith('/api')) {
return reply.status(404).send({
success: false
});
}
return reply.status(200).sendFile('index.html');
});
}
fastify.register(autoLoad, {
dir: join(__dirname, 'plugins')
});
fastify.register(autoLoad, {
dir: join(__dirname, 'routes')
});
fastify.register(cookie)
fastify.register(cors);
fastify.listen({ port, host }, async (err: any, address: any) => {
if (err) {
console.error(err);
process.exit(1);
}
console.log(`Coolify's API is listening on ${host}:${port}`);
await initServer()
await scheduler.start('deployApplication');
await scheduler.start('cleanupStorage');
await scheduler.start('checkProxies')
// Check if no build is running
// Check for update
setInterval(async () => {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("status:autoUpdater");
}
}
}, 60000 * 15)
// Cleanup storage
setInterval(async () => {
if (scheduler.workers.has('deployApplication')) {
scheduler.workers.get('deployApplication').postMessage("status:cleanupStorage");
}
}, 60000 * 10)
scheduler.on('worker deleted', async (name) => {
if (name === 'autoUpdater' || name === 'cleanupStorage') {
if (!scheduler.workers.has('deployApplication')) await scheduler.start('deployApplication');
}
});
});
async function initServer() {
try {
await asyncExecShell(`docker network create --attachable coolify`);
} catch (error) { }
}


@@ -0,0 +1,43 @@
import axios from 'axios';
import compareVersions from 'compare-versions';
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, asyncSleep, isDev, prisma, version } from '../lib/common';
(async () => {
if (parentPort) {
try {
const currentVersion = version;
const { data: versions } = await axios
.get(
`https://get.coollabs.io/versions.json`
, {
params: {
appId: process.env['COOLIFY_APP_ID'] || undefined,
version: currentVersion
}
})
const latestVersion = versions['coolify'].main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isUpdateAvailable === 1) {
const activeCount = 0
if (activeCount === 0) {
if (!isDev) {
console.log(`Updating Coolify to ${latestVersion}.`);
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
await asyncExecShell(`env | grep COOLIFY > .env`);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
);
} else {
console.log('Updating (not really in dev mode).');
}
}
}
} catch (error) {
console.log(error);
} finally {
await prisma.$disconnect();
}
} else process.exit(0);
})();


@@ -0,0 +1,88 @@
import { parentPort } from 'node:worker_threads';
import { prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, asyncExecShell } from '../lib/common';
import { checkContainer, getEngine } from '../lib/docker';
(async () => {
if (parentPort) {
// Coolify Proxy
const engine = '/var/run/docker.sock';
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify' }
});
if (localDocker && localDocker.isCoolifyProxyUsed) {
// Remove HAProxy
const found = await checkContainer(engine, 'coolify-haproxy');
const host = getEngine(engine);
if (found) {
await asyncExecShell(
`DOCKER_HOST="${host}" docker stop -t 0 coolify-haproxy && docker rm coolify-haproxy`
);
}
await startTraefikProxy(engine);
}
// TCP Proxies
const databasesWithPublicPort = await prisma.database.findMany({
where: { publicPort: { not: null } },
include: { settings: true, destinationDocker: true }
});
for (const database of databasesWithPublicPort) {
const { destinationDockerId, destinationDocker, publicPort, id } = database;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
const { privatePort } = generateDatabaseConfiguration(database);
// Remove HAProxy
const found = await checkContainer(engine, `haproxy-for-${publicPort}`);
const host = getEngine(engine);
if (found) {
await asyncExecShell(
`DOCKER_HOST="${host}" docker stop -t 0 haproxy-for-${publicPort} && docker rm haproxy-for-${publicPort}`
);
}
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
}
}
const wordpressWithFtp = await prisma.wordpress.findMany({
where: { ftpPublicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const ftp of wordpressWithFtp) {
const { service, ftpPublicPort } = ftp;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
// Remove HAProxy
const found = await checkContainer(engine, `haproxy-for-${ftpPublicPort}`);
const host = getEngine(engine);
if (found) {
await asyncExecShell(
`DOCKER_HOST="${host}" docker stop -t 0 haproxy-for-${ftpPublicPort} && docker rm haproxy-for-${ftpPublicPort} `
);
}
await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
}
}
// HTTP Proxies
const minioInstances = await prisma.minio.findMany({
where: { publicPort: { not: null } },
include: { service: { include: { destinationDocker: true } } }
});
for (const minio of minioInstances) {
const { service, publicPort } = minio;
const { destinationDockerId, destinationDocker, id } = service;
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
// Remove HAProxy
const found = await checkContainer(engine, `${id}-${publicPort}`);
const host = getEngine(engine);
if (found) {
await asyncExecShell(
`DOCKER_HOST="${host}" docker stop -t 0 ${id}-${publicPort} && docker rm ${id}-${publicPort}`
);
}
await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
}
}
await prisma.$disconnect();
} else process.exit(0);
})();
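Each branch above repeats the same pattern: look for a leftover HAProxy container, stop and remove it, then hand the port over to Traefik. A sketch of how that could be factored into one helper, reusing checkContainer, getEngine and asyncExecShell from this file's imports (the helper name is hypothetical):

// Hypothetical helper: retire a legacy HAProxy container before Traefik takes over its port.
async function retireHaproxy(engine: string, containerName: string): Promise<void> {
  const host = getEngine(engine);
  const found = await checkContainer(engine, containerName);
  if (found) {
    await asyncExecShell(
      `DOCKER_HOST="${host}" docker stop -t 0 ${containerName} && docker rm ${containerName}`
    );
  }
}

// The WordPress FTP branch, for example, would then reduce to:
// await retireHaproxy(engine, `haproxy-for-${ftpPublicPort}`);
// await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');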

View File

@@ -0,0 +1,60 @@
import { parentPort } from 'node:worker_threads';
import { asyncExecShell, cleanupDockerStorage, isDev, prisma, version } from '../lib/common';
import { getEngine } from '../lib/docker';
(async () => {
if (parentPort) {
const destinationDockers = await prisma.destinationDocker.findMany();
const engines = [...new Set(destinationDockers.map(({ engine }) => engine))];
for (const engine of engines) {
let lowDiskSpace = false;
const host = getEngine(engine);
try {
let stdout = null
if (!isDev) {
const output = await asyncExecShell(
`DOCKER_HOST=${host} docker exec coolify sh -c 'df -kPT /'`
);
stdout = output.stdout;
} else {
const output = await asyncExecShell(
`df -kPT /`
);
stdout = output.stdout;
}
let lines = stdout.trim().split('\n');
let header = lines[0];
let regex =
/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
const boundaries = [];
let match;
while ((match = regex.exec(header))) {
boundaries.push(match[0].length);
}
boundaries[boundaries.length - 1] = -1;
const data = lines.slice(1).map((line) => {
const cl = boundaries.map((boundary) => {
const column = boundary > 0 ? line.slice(0, boundary) : line;
line = line.slice(boundary);
return column.trim();
});
return {
capacity: Number.parseInt(cl[5], 10) / 100
};
});
if (data.length > 0) {
const { capacity } = data[0];
if (capacity > 0.8) {
lowDiskSpace = true;
}
}
} catch (error) {
console.log(error);
}
await cleanupDockerStorage(host, lowDiskSpace, false)
}
await prisma.$disconnect();
} else process.exit(0);
})();
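The column-boundary matching above exists only to read the Capacity column of df -kPT /; when that value exceeds 80%, the aggressive cleanup path runs. A worked example against hypothetical df output, using a simpler percentage regex to show the same decision:

// Hypothetical `df -kPT /` output (values invented for illustration):
const sample = [
  'Filesystem     Type 1024-blocks     Used Available Capacity Mounted on',
  '/dev/sda1      ext4    41152736 34979826   6172910      85% /'
].join('\n');

const capacityMatch = sample.split('\n')[1].match(/(\d+)%/);
const capacity = capacityMatch ? Number(capacityMatch[1]) / 100 : 0; // 0.85
const lowDiskSpace = capacity > 0.8; // true -> cleanupDockerStorage(host, true, false)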

View File

@@ -0,0 +1,359 @@
import { parentPort } from 'node:worker_threads';
import crypto from 'crypto';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
import { asyncExecShell, createDirectories, decrypt, getDomain, prisma } from '../lib/common';
import { dockerInstance, getEngine } from '../lib/docker';
import * as importers from '../lib/importers';
import * as buildpacks from '../lib/buildPacks';
(async () => {
if (parentPort) {
const concurrency = 1
const PQueue = await import('p-queue');
const queue = new PQueue.default({ concurrency });
parentPort.on('message', async (message) => {
if (parentPort) {
if (message === 'error') throw new Error('oops');
if (message === 'cancel') {
parentPort.postMessage('cancelled');
return;
}
if (message === 'status:autoUpdater') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
return;
}
if (message === 'status:cleanupStorage') {
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'cleanupStorage' });
return;
}
await queue.add(async () => {
const {
id: applicationId,
repository,
name,
destinationDocker,
destinationDockerId,
gitSource,
build_id: buildId,
configHash,
fqdn,
projectId,
secrets,
phpModules,
type,
pullmergeRequestId = null,
sourceBranch = null,
settings,
persistentStorage,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
exposePort,
baseImage,
baseBuildImage,
deploymentType,
} = message
let {
branch,
buildPack,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
dockerFileLocation,
denoMainFile
} = message
try {
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
}) || [];
// For preview deployments (pull/merge requests), build from the source branch and deploy on a PR-specific subdomain
if (pullmergeRequestId) {
branch = sourceBranch;
domain = `${pullmergeRequestId}.${domain}`;
imageId = `${applicationId}-${pullmergeRequestId}`;
}
let deployNeeded = true;
let destinationType;
if (destinationDockerId) {
destinationType = 'docker';
}
if (destinationType === 'docker') {
const docker = dockerInstance({ destinationDocker });
const host = getEngine(destinationDocker.engine);
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
const { workdir, repodir } = await createDirectories({ repository, buildId });
const configuration = await setDefaultConfiguration(message);
buildPack = configuration.buildPack;
port = configuration.port;
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
applicationId,
debug,
workdir,
repodir,
githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort,
repository,
branch,
buildId,
apiUrl: gitSource.apiUrl,
htmlUrl: gitSource.htmlUrl,
projectId,
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null
});
if (!commit) {
throw new Error('No commit found?');
}
let tag = commit.slice(0, 7);
if (pullmergeRequestId) {
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
}
try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
if (!pullmergeRequestId) {
const currentHash = crypto
//@ts-ignore
.createHash('sha256')
.update(
JSON.stringify({
buildPack,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
secrets,
branch,
repository,
fqdn
})
)
.digest('hex');
if (configHash !== currentHash) {
await prisma.application.update({
where: { id: applicationId },
data: { configHash: currentHash }
});
deployNeeded = true;
if (configHash) {
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
}
} else {
deployNeeded = false;
}
} else {
deployNeeded = true;
}
const image = await docker.engine.getImage(`${applicationId}:${tag}`);
let imageFound = false;
try {
await image.inspect();
imageFound = true;
} catch (error) {
//
}
if (!imageFound || deployNeeded) {
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
if (buildpacks[buildPack])
await buildpacks[buildPack]({
buildId,
applicationId,
domain,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
publishDirectory,
debug,
commit,
tag,
workdir,
docker,
port: exposePort ? `${exposePort}:${port}` : port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
phpModules,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
});
else {
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
throw new Error(`Build pack ${buildPack} not found.`);
}
} else {
await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
}
try {
await asyncExecShell(`DOCKER_HOST=${host} docker stop -t 0 ${imageId}`);
await asyncExecShell(`DOCKER_HOST=${host} docker rm ${imageId}`);
} catch (error) {
//
}
const envs = [];
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
envs.push(`${secret.name}=${secret.value}`);
}
}
});
}
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
const labels = makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port: exposePort ? `${exposePort}:${port}` : port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
});
let envFound = false;
try {
envFound = !!(await fs.stat(`${workdir}/.env`));
} catch (error) {
//
}
try {
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
[imageId]: {
image: `${applicationId}:${tag}`,
container_name: imageId,
volumes,
env_file: envFound ? [`${workdir}/.env`] : [],
networks: [docker.network],
labels,
depends_on: [],
restart: 'always',
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
// logging: {
// driver: 'fluentd',
// },
deploy: {
restart_policy: {
condition: 'on-failure',
delay: '5s',
max_attempts: 3,
window: '120s'
}
}
}
},
networks: {
[docker.network]: {
external: true
}
},
volumes: Object.assign({}, ...composeVolumes)
};
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
await asyncExecShell(
`DOCKER_HOST=${host} docker compose --project-directory ${workdir} up -d`
);
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.update({
where: { id: message.build_id },
data: { status: 'failed' }
});
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
}
}
catch (error) {
await prisma.build.update({
where: { id: message.build_id },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId });
} finally {
await prisma.$disconnect();
}
});
await prisma.$disconnect();
}
});
} else process.exit(0);
})();
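The skip-rebuild logic in the worker above comes down to hashing the build-relevant inputs and comparing the digest with the configHash stored on the application. A condensed sketch of that decision (all field values below are hypothetical):

import crypto from 'crypto';

const storedConfigHash: string | null = null; // value previously saved on the application record

const currentHash = crypto
  .createHash('sha256')
  .update(
    JSON.stringify({
      buildPack: 'node',
      port: 3000,
      exposePort: null,
      installCommand: 'yarn install',
      buildCommand: null,
      startCommand: 'yarn start',
      secrets: [],
      branch: 'main',
      repository: 'acme/app',
      fqdn: 'https://app.example.com'
    })
  )
  .digest('hex');

// A rebuild happens when the hash changed, when no hash was stored yet,
// or when the image tag is not present locally.
const deployNeeded = storedConfigHash !== currentHash;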

View File

@@ -1,240 +1,23 @@
import { base64Encode } from '$lib/crypto';
import { getDomain, saveBuildLog, version } from '$lib/common';
import * as db from '$lib/database';
import { scanningTemplates } from '$lib/components/templates';
import { asyncExecShell, base64Encode, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { scheduler } from "../scheduler";
import { promises as fs } from 'fs';
import { staticDeployments } from '$lib/components/common';
import { day } from "../dayjs";
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
const nodeBased = [
'react',
'preact',
'vuejs',
'svelte',
'gatsby',
'php',
'astro',
'eleventy',
'node',
'nestjs'
'nestjs',
'nuxtjs',
'nextjs'
];
export function makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId = null,
buildPack,
repository,
branch,
projectId,
port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
}) {
if (pullmergeRequestId) {
const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
const domain = getDomain(fqdn);
fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
}
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.type=standalone-application`,
`coolify.configuration=${base64Encode(
JSON.stringify({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
})
)}`
];
}
export async function makeLabelForStandaloneDatabase({ id, image, volume }) {
const database = await db.prisma.database.findFirst({ where: { id } });
delete database.destinationDockerId;
delete database.createdAt;
delete database.updatedAt;
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.type=standalone-database`,
`coolify.configuration=${base64Encode(
JSON.stringify({
version,
image,
volume,
...database
})
)}`
];
}
export function makeLabelForServices(type) {
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.type=service`,
`coolify.service.type=${type}`
];
}
export const setDefaultConfiguration = async (data) => {
let {
buildPack,
port,
installCommand,
startCommand,
buildCommand,
publishDirectory,
baseDirectory,
dockerFileLocation,
denoMainFile
} = data;
const template = scanningTemplates[buildPack];
if (!port) {
port = template?.port || 3000;
if (buildPack === 'static') port = 80;
else if (buildPack === 'node') port = 3000;
else if (buildPack === 'php') port = 80;
else if (buildPack === 'python') port = 8000;
}
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
installCommand = template?.installCommand || 'yarn install';
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
startCommand = template?.startCommand || 'yarn start';
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
buildCommand = template?.buildCommand || null;
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
}
if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
} else {
dockerFileLocation = '/Dockerfile';
}
if (!denoMainFile) {
denoMainFile = 'main.ts';
}
return {
buildPack,
port,
installCommand,
startCommand,
buildCommand,
publishDirectory,
baseDirectory,
dockerFileLocation,
denoMainFile
};
};
export async function copyBaseConfigurationFiles(
buildPack,
workdir,
buildId,
applicationId,
baseImage
) {
try {
if (buildPack === 'php') {
await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
await saveBuildLog({
line: 'Copied default configuration file for PHP.',
buildId,
applicationId
});
} else if (staticDeployments.includes(buildPack) && baseImage.includes('nginx')) {
await fs.writeFile(
`${workdir}/nginx.conf`,
`user nginx;
worker_processes auto;
error_log /docker.stdout;
pid /run/nginx.pid;
events {
worker_connections 1024;
}
http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /docker.stdout main;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
include /etc/nginx/mime.types;
default_type application/octet-stream;
server {
listen 80;
server_name localhost;
location / {
root /app;
index index.html;
try_files $uri $uri/index.html $uri/ /index.html =404;
}
error_page 404 /50x.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /app;
}
}
}
`
);
}
} catch (error) {
console.log(error);
throw new Error(error);
}
}
export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
return (
installCommand?.includes('pnpm') ||
buildCommand?.includes('pnpm') ||
startCommand?.includes('pnpm')
);
}
export function setDefaultBaseImage(buildPack) {
export function setDefaultBaseImage(buildPack: string | null, deploymentType: string | null = null) {
const nodeVersions = [
{
value: 'node:lts',
@@ -403,18 +186,90 @@ export function setDefaultBaseImage(buildPack) {
label: 'webdevops/php-nginx:7.1-alpine'
}
];
let payload = {
const pythonVersions = [
{
value: 'python:3.10-alpine',
label: 'python:3.10-alpine'
},
{
value: 'python:3.10-buster',
label: 'python:3.10-buster'
},
{
value: 'python:3.10-bullseye',
label: 'python:3.10-bullseye'
},
{
value: 'python:3.10-slim-bullseye',
label: 'python:3.10-slim-bullseye'
},
{
value: 'python:3.9-alpine',
label: 'python:3.9-alpine'
},
{
value: 'python:3.9-buster',
label: 'python:3.9-buster'
},
{
value: 'python:3.9-bullseye',
label: 'python:3.9-bullseye'
},
{
value: 'python:3.9-slim-bullseye',
label: 'python:3.9-slim-bullseye'
},
{
value: 'python:3.8-alpine',
label: 'python:3.8-alpine'
},
{
value: 'python:3.8-buster',
label: 'python:3.8-buster'
},
{
value: 'python:3.8-bullseye',
label: 'python:3.8-bullseye'
},
{
value: 'python:3.8-slim-bullseye',
label: 'python:3.8-slim-bullseye'
},
{
value: 'python:3.7-alpine',
label: 'python:3.7-alpine'
},
{
value: 'python:3.7-buster',
label: 'python:3.7-buster'
},
{
value: 'python:3.7-bullseye',
label: 'python:3.7-bullseye'
},
{
value: 'python:3.7-slim-bullseye',
label: 'python:3.7-slim-bullseye'
}
];
let payload: any = {
baseImage: null,
baseBuildImage: null,
baseImages: [],
baseBuildImages: []
};
if (nodeBased.includes(buildPack)) {
payload.baseImage = 'node:lts';
payload.baseImages = nodeVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
if (deploymentType === 'static') {
payload.baseImage = 'webdevops/nginx:alpine';
payload.baseImages = staticVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
} else {
payload.baseImage = 'node:lts';
payload.baseImages = nodeVersions;
payload.baseBuildImage = 'node:lts';
payload.baseBuildImages = nodeVersions;
}
}
if (staticApps.includes(buildPack)) {
payload.baseImage = 'webdevops/nginx:alpine';
@@ -423,7 +278,8 @@ export function setDefaultBaseImage(buildPack) {
payload.baseBuildImages = nodeVersions;
}
if (buildPack === 'python') {
payload.baseImage = 'python:3-alpine';
payload.baseImage = 'python:3.10-alpine';
payload.baseImages = pythonVersions;
}
if (buildPack === 'rust') {
payload.baseImage = 'rust:latest';
@@ -445,3 +301,421 @@ export function setDefaultBaseImage(buildPack) {
}
return payload;
}
export const setDefaultConfiguration = async (data: any) => {
let {
buildPack,
port,
installCommand,
startCommand,
buildCommand,
publishDirectory,
baseDirectory,
dockerFileLocation,
denoMainFile
} = data;
//@ts-ignore
const template = scanningTemplates[buildPack];
if (!port) {
port = template?.port || 3000;
if (buildPack === 'static') port = 80;
else if (buildPack === 'node') port = 3000;
else if (buildPack === 'php') port = 80;
else if (buildPack === 'python') port = 8000;
}
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
installCommand = template?.installCommand || 'yarn install';
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
startCommand = template?.startCommand || 'yarn start';
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
buildCommand = template?.buildCommand || null;
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
}
if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
} else {
dockerFileLocation = '/Dockerfile';
}
if (!denoMainFile) {
denoMainFile = 'main.ts';
}
return {
buildPack,
port,
installCommand,
startCommand,
buildCommand,
publishDirectory,
baseDirectory,
dockerFileLocation,
denoMainFile
};
};
export const scanningTemplates = {
'@sveltejs/kit': {
buildPack: 'nodejs'
},
astro: {
buildPack: 'astro'
},
'@11ty/eleventy': {
buildPack: 'eleventy'
},
svelte: {
buildPack: 'svelte'
},
'@nestjs/core': {
buildPack: 'nestjs'
},
next: {
buildPack: 'nextjs'
},
nuxt: {
buildPack: 'nuxtjs'
},
'react-scripts': {
buildPack: 'react'
},
'parcel-bundler': {
buildPack: 'static'
},
'@vue/cli-service': {
buildPack: 'vuejs'
},
vuejs: {
buildPack: 'vuejs'
},
gatsby: {
buildPack: 'gatsby'
},
'preact-cli': {
buildPack: 'react'
}
};
export const saveBuildLog = async ({
line,
buildId,
applicationId
}: {
line: string;
buildId: string;
applicationId: string;
}): Promise<any> => {
if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `[${generateTimestamp()}] ${line}`;
if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
};
export async function copyBaseConfigurationFiles(
buildPack,
workdir,
buildId,
applicationId,
baseImage
) {
try {
if (buildPack === 'php') {
await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
await saveBuildLog({
line: 'Copied default configuration file for PHP.',
buildId,
applicationId
});
} else if (baseImage?.includes('nginx')) {
await fs.writeFile(
`${workdir}/nginx.conf`,
`user nginx;
worker_processes auto;
error_log /docker.stdout;
pid /run/nginx.pid;
events {
worker_connections 1024;
}
http {
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /docker.stdout main;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
include /etc/nginx/mime.types;
default_type application/octet-stream;
server {
listen 80;
server_name localhost;
location / {
root /app;
index index.html;
try_files $uri $uri/index.html $uri/ /index.html =404;
}
error_page 404 /50x.html;
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /app;
}
}
}
`
);
}
} catch (error) {
console.log(error);
throw new Error(error);
}
}
export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
return (
installCommand?.includes('pnpm') ||
buildCommand?.includes('pnpm') ||
startCommand?.includes('pnpm')
);
}
export async function buildImage({
applicationId,
tag,
workdir,
docker,
buildId,
isCache = false,
debug = false,
dockerFileLocation = '/Dockerfile'
}) {
if (isCache) {
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
} else {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
}
if (!debug && isCache) {
await saveBuildLog({
line: `Debug turned off. To see more details, allow it in the configuration.`,
buildId,
applicationId
});
}
const stream = await docker.engine.buildImage(
{ src: ['.'], context: workdir },
{
dockerfile: isCache ? `${dockerFileLocation}-cache` : dockerFileLocation,
t: `${applicationId}:${tag}${isCache ? '-cache' : ''}`
}
);
await streamEvents({ stream, docker, buildId, applicationId, debug });
await saveBuildLog({ line: `Building image successful!`, buildId, applicationId });
}
export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
await new Promise((resolve, reject) => {
docker.engine.modem.followProgress(stream, onFinished, onProgress);
function onFinished(err, res) {
if (err) reject(err);
resolve(res);
}
async function onProgress(event) {
if (event.error) {
reject(event.error);
} else if (event.stream) {
if (event.stream !== '\n') {
if (debug)
await saveBuildLog({
line: `${event.stream.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
});
}
export function makeLabelForStandaloneApplication({
applicationId,
fqdn,
name,
type,
pullmergeRequestId = null,
buildPack,
repository,
branch,
projectId,
port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
}) {
if (pullmergeRequestId) {
const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
const domain = getDomain(fqdn);
fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
}
return [
'coolify.managed=true',
`coolify.version=${version}`,
`coolify.type=standalone-application`,
`coolify.configuration=${base64Encode(
JSON.stringify({
applicationId,
fqdn,
name,
type,
pullmergeRequestId,
buildPack,
repository,
branch,
projectId,
port,
commit,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory
})
)}`
];
}
export async function buildCacheImageWithNode(data, imageForBuild) {
const {
applicationId,
tag,
workdir,
docker,
buildId,
baseDirectory,
installCommand,
buildCommand,
debug,
secrets,
pullmergeRequestId
} = data;
const isPnpm = checkPnpm(installCommand, buildCommand);
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
});
}
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
if (installCommand) {
Dockerfile.push(`COPY .${baseDirectory || ''}/package.json ./`);
Dockerfile.push(`RUN ${installCommand}`);
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${buildCommand}`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}
export async function buildCacheImageForLaravel(data, imageForBuild) {
const { applicationId, tag, workdir, docker, buildId, debug, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
});
}
Dockerfile.push(`COPY *.json *.mix.js /app/`);
Dockerfile.push(`COPY resources /app/resources`);
Dockerfile.push(`RUN yarn install && yarn production`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}
export async function buildCacheImageWithCargo(data, imageForBuild) {
const {
applicationId,
tag,
workdir,
docker,
buildId,
baseDirectory,
installCommand,
buildCommand,
debug,
secrets
} = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push('RUN cargo install cargo-chef');
Dockerfile.push('COPY . .');
Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json');
Dockerfile.push(`FROM ${imageForBuild}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push('RUN cargo install cargo-chef');
Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
}
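A short usage sketch of the reworked setDefaultBaseImage: for node-based build packs, deploymentType now decides whether the runtime image is nginx (static output) or node (server output). The return values below are abbreviated and illustrative:

// Static deployment of a Next.js app: serve the exported assets with nginx, build with node.
const staticDefaults = setDefaultBaseImage('nextjs', 'static');
// -> { baseImage: 'webdevops/nginx:alpine', baseBuildImage: 'node:lts', ... }

// Server-side deployment keeps node for both the build and the runtime image.
const nodeDefaults = setDefaultBaseImage('nextjs', 'node');
// -> { baseImage: 'node:lts', baseBuildImage: 'node:lts', ... }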

View File

@@ -1,5 +1,5 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
@@ -42,9 +42,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
Dockerfile.push(`RUN deno cache deps.ts`);
}
Dockerfile.push(`COPY ${denoMainFile} /app`);
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
Dockerfile.push(`ENV NO_COLOR true`);
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);

View File

@@ -1,5 +1,5 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildImage } from './common';
export default async function ({
applicationId,

View File

@@ -1,26 +1,26 @@
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, imageforBuild): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageforBuild}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
if (baseImage.includes('nginx')) {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const { baseImage, baseBuildImage } = data;
await buildCacheImageWithNode(data, baseImage);
await createDockerfile(data, baseBuildImage);
await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {
throw error;

View File

@@ -1,8 +1,8 @@
import { buildCacheImageForLaravel, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageForLaravel, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { workdir, applicationId, tag, buildId } = data;
const { workdir, applicationId, tag, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
@@ -24,7 +24,7 @@ const createDockerfile = async (data, image): Promise<void> => {
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
);
Dockerfile.push(`COPY --chown=application:application . ./`);
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,5 +1,5 @@
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
@@ -10,8 +10,7 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
Dockerfile.push('RUN pnpm add -g pnpm');
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);

View File

@@ -1,18 +1,22 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { checkPnpm } from './common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
applicationId,
buildId,
tag,
workdir,
publishDirectory,
port,
installCommand,
buildCommand,
startCommand,
baseDirectory,
secrets,
pullmergeRequestId
pullmergeRequestId,
deploymentType,
baseImage
} = data;
const Dockerfile: Array<string> = [];
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
@@ -35,25 +39,36 @@ const createDockerfile = async (data, image): Promise<void> => {
});
}
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
Dockerfile.push('RUN pnpm add -g pnpm');
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
if (buildCommand) {
if (deploymentType === 'node') {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
} else if (deploymentType === 'static') {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`EXPOSE 80`);
}
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
if (deploymentType === 'node') {
await createDockerfile(data, baseImage);
await buildImage(data);
} else if (deploymentType === 'static') {
if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
}
} catch (error) {
throw error;
}

View File

@@ -1,6 +1,5 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { checkPnpm } from './common';
import { buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
@@ -36,8 +35,7 @@ const createDockerfile = async (data, image): Promise<void> => {
});
}
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
Dockerfile.push('RUN pnpm add -g pnpm');
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
@@ -51,7 +49,7 @@ const createDockerfile = async (data, image): Promise<void> => {
export default async function (data) {
try {
const { baseImage, baseBuildImage } = data;
const { baseImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
} catch (error) {

View File

@@ -1,10 +1,13 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { checkPnpm } from './common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
applicationId,
buildId,
tag,
workdir,
publishDirectory,
port,
installCommand,
buildCommand,
@@ -12,7 +15,8 @@ const createDockerfile = async (data, image): Promise<void> => {
baseDirectory,
secrets,
pullmergeRequestId,
buildId
deploymentType,
baseImage
} = data;
const Dockerfile: Array<string> = [];
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
@@ -35,24 +39,36 @@ const createDockerfile = async (data, image): Promise<void> => {
});
}
if (isPnpm) {
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
Dockerfile.push('RUN pnpm add -g pnpm');
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
}
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
if (buildCommand) {
if (deploymentType === 'node') {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
} else if (deploymentType === 'static') {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`EXPOSE 80`);
}
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
export default async function (data) {
try {
const { baseImage, baseBuildImage } = data;
await createDockerfile(data, baseImage);
await buildImage(data);
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
if (deploymentType === 'node') {
await createDockerfile(data, baseImage);
await buildImage(data);
} else if (deploymentType === 'static') {
if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
await createDockerfile(data, baseImage);
await buildImage(data);
}
} catch (error) {
throw error;
}

View File

@@ -1,8 +1,8 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildImage } from './common';
const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
const { workdir, baseDirectory, buildId } = data;
const { workdir, baseDirectory, buildId, port, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = [];
let composerFound = false;
try {
@@ -12,6 +12,21 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
if (!secret.isPRMRSecret) {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
}
}
});
}
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
if (htaccessFound) {
@@ -22,7 +37,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
}
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,5 +1,5 @@
import { buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
@@ -34,6 +34,8 @@ const createDockerfile = async (data, image): Promise<void> => {
}
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
Dockerfile.push(`RUN pip install gunicorn`);
} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
Dockerfile.push(`RUN pip install uvicorn`);
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
// Dockerfile.push(`RUN pip install --no-cache-dir uwsgi`)
@@ -50,6 +52,8 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`EXPOSE ${port}`);
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
Dockerfile.push(`CMD uvicorn ${pythonModule}:${pythonVariable} --port ${port} --host 0.0.0.0`);
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
Dockerfile.push(
`CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`

View File

@@ -1,18 +1,18 @@
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
if (baseImage.includes('nginx')) {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,7 +1,7 @@
import { asyncExecShell } from '$lib/common';
import { buildCacheImageWithCargo, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
import { asyncExecShell } from '../common';
import { buildCacheImageWithCargo, buildImage } from './common';
const createDockerfile = async (data, image, name): Promise<void> => {
const { workdir, port, applicationId, tag, buildId } = data;

View File

@@ -1,5 +1,5 @@
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const {
@@ -12,7 +12,8 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets,
pullmergeRequestId,
baseImage,
buildId
buildId,
port
} = data;
const Dockerfile: Array<string> = [];
@@ -39,10 +40,10 @@ const createDockerfile = async (data, image): Promise<void> => {
} else {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
}
if (baseImage.includes('nginx')) {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,18 +1,18 @@
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
if (baseImage.includes('nginx')) {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

View File

@@ -1,18 +1,18 @@
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => {
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
if (baseImage.includes('nginx')) {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`EXPOSE 80`);
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

apps/api/src/lib/common.ts (1577 lines, normal file)

File diff suppressed because it is too large

View File

@@ -4,4 +4,4 @@ import relativeTime from 'dayjs/plugin/relativeTime.js';
dayjs.extend(utc);
dayjs.extend(relativeTime);
export { dayjs };
export { dayjs as day };

View File

@@ -0,0 +1,78 @@
import { asyncExecShell } from './common';
import Dockerode from 'dockerode';
export function getEngine(engine: string): string {
return engine === '/var/run/docker.sock' ? 'unix:///var/run/docker.sock' : engine;
}
export function dockerInstance({ destinationDocker }): { engine: Dockerode; network: string } {
return {
engine: new Dockerode({
socketPath: destinationDocker.engine
}),
network: destinationDocker.network
};
}
export async function checkContainer(engine: string, container: string, remove = false): Promise<boolean> {
const host = getEngine(engine);
let containerFound = false;
try {
const { stdout } = await asyncExecShell(
`DOCKER_HOST="${host}" docker inspect --format '{{json .State}}' ${container}`
);
const parsedStdout = JSON.parse(stdout);
const status = parsedStdout.Status;
const isRunning = status === 'running';
if (status === 'created') {
await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
}
if (remove && status === 'exited') {
await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
}
if (isRunning) {
containerFound = true;
}
} catch (err) {
// Container not found
}
return containerFound;
}
export async function isContainerExited(engine: string, containerName: string): Promise<boolean> {
let isExited = false;
const host = getEngine(engine);
try {
const { stdout } = await asyncExecShell(
`DOCKER_HOST="${host}" docker inspect -f '{{.State.Status}}' ${containerName}`
);
if (stdout.trim() === 'exited') {
isExited = true;
}
} catch (error) {
//
}
return isExited;
}
export async function removeContainer({
id,
engine
}: {
id: string;
engine: string;
}): Promise<void> {
const host = getEngine(engine);
try {
const { stdout } = await asyncExecShell(
`DOCKER_HOST=${host} docker inspect --format '{{json .State}}' ${id}`
);
if (JSON.parse(stdout).Running) {
await asyncExecShell(`DOCKER_HOST=${host} docker stop -t 0 ${id}`);
await asyncExecShell(`DOCKER_HOST=${host} docker rm ${id}`);
}
} catch (error) {
console.log(error);
throw error;
}
}
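A brief usage sketch for the helpers above: checkContainer reports true only for a running container, and with remove = true it also clears out one stuck in the exited state first. The container name below is taken from the checkProxies job:

(async () => {
  const engine = '/var/run/docker.sock';
  // True only while the container is running; an exited one would be removed because of the third argument.
  const proxyRunning = await checkContainer(engine, 'coolify-haproxy', true);
  if (!proxyRunning) {
    // the checkProxies job would start (or restart) the Traefik proxy here
  }
})();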

View File

@@ -1,7 +1,7 @@
import { asyncExecShell, saveBuildLog } from '$lib/common';
import got from 'got';
import jsonwebtoken from 'jsonwebtoken';
import * as db from '$lib/database';
import { saveBuildLog } from '../buildPacks/common';
import { asyncExecShell, decrypt, prisma } from '../common';
export default async function ({
applicationId,
@@ -11,7 +11,8 @@ export default async function ({
apiUrl,
htmlUrl,
branch,
buildId
buildId,
customPort
}: {
applicationId: string;
workdir: string;
@@ -21,10 +22,16 @@ export default async function ({
htmlUrl: string;
branch: string;
buildId: string;
customPort: number;
}): Promise<string> {
const { default: got } = await import('got')
const url = htmlUrl.replace('https://', '').replace('http://', '');
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
const { privateKey, appId, installationId } = await db.getUniqueGithubApp({ githubAppId });
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
if (body.privateKey) body.privateKey = decrypt(body.privateKey);
const { privateKey, appId, installationId } = body
const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');
const payload = {
@@ -49,7 +56,7 @@ export default async function ({
applicationId
});
await asyncExecShell(
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
);
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
return commit.replace('\n', '');

View File

@@ -1,4 +1,5 @@
import { asyncExecShell, saveBuildLog } from '$lib/common';
import { saveBuildLog } from "../buildPacks/common";
import { asyncExecShell } from "../common";
export default async function ({
applicationId,
@@ -8,7 +9,8 @@ export default async function ({
repository,
branch,
buildId,
privateSshKey
privateSshKey,
customPort
}: {
applicationId: string;
workdir: string;
@@ -18,6 +20,7 @@ export default async function ({
buildId: string;
repodir: string;
privateSshKey: string;
customPort: number;
}): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
@@ -31,7 +34,7 @@ export default async function ({
});
await asyncExecShell(
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
return commit.replace('\n', '');

View File

@@ -0,0 +1,52 @@
import Bree from 'bree';
import path from 'path';
import Cabin from 'cabin';
import TSBree from '@breejs/ts-worker';
import { isDev } from './common';
Bree.extend(TSBree);
const options: any = {
defaultExtension: 'js',
logger: false,
workerMessageHandler: async ({ name, message }) => {
if (name === 'deployApplication') {
if (message.pending === 0 && message.size === 0) {
if (message.caller === 'autoUpdater') {
if (!scheduler.workers.has('autoUpdater')) {
await scheduler.stop('deployApplication');
await scheduler.run('autoUpdater')
}
}
if (message.caller === 'cleanupStorage') {
if (!scheduler.workers.has('cleanupStorage')) {
await scheduler.stop('deployApplication');
await scheduler.run('cleanupStorage')
}
}
}
}
},
jobs: [
{
name: 'deployApplication'
},
{
name: 'cleanupStorage',
},
{
name: 'checkProxies',
interval: '10s'
},
{
name: 'autoUpdater',
}
],
};
if (isDev) options.root = path.join(__dirname, '../jobs');
export const scheduler = new Bree(options);
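The workerMessageHandler above closes a small handshake with the deploy worker: the API posts status:autoUpdater (or status:cleanupStorage), the deploy worker replies with its queue counters, and only an idle queue lets the scheduler stop deployApplication and run the one-off job. The flow, condensed, with names taken from the files above:

// 1. API process, every 15 minutes:
//      scheduler.workers.get('deployApplication').postMessage('status:autoUpdater');
// 2. deployApplication worker:
//      parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
// 3. workerMessageHandler (above), only when size === 0 and pending === 0:
//      await scheduler.stop('deployApplication');
//      await scheduler.run('autoUpdater');
// 4. The 'worker deleted' listener in the API restarts deployApplication once the job finishes.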

View File

@@ -0,0 +1,479 @@
// Example:
// export const nocodb = [{
// name: 'postgreslUser',
// isEditable: false,
// isLowerCase: false,
// isNumber: false,
// isBoolean: false,
// isEncrypted: false
// }]
export const plausibleAnalytics = [{
name: 'email',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
}, {
name: 'username',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'password',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'postgresqlUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'postgresqlPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'postgresqlDatabase',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'postgresqlPublicPort',
isEditable: false,
isLowerCase: false,
isNumber: true,
isBoolean: false,
isEncrypted: false
},
{
name: 'secretKeyBase',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'scriptName',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
}]
export const minio = [{
name: 'apiFqdn',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
}, {
name: 'rootUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'rootUserPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
export const vscodeserver = [{
name: 'password',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
export const wordpress = [{
name: 'extraConfig',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mysqlHost',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mysqlPort',
isEditable: true,
isLowerCase: false,
isNumber: true,
isBoolean: false,
isEncrypted: false
},
{
name: 'mysqlUser',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mysqlPassword',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mysqlRootUser',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mysqlRootUserPassword',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mysqlDatabase',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'ftpPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
export const ghost = [{
name: 'defaultEmail',
isEditable: false,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'defaultPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mariadbUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mariadbPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mariadbRootUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mariadbRootUserPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mariadbDatabase',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
}]
export const meiliSearch = [{
name: 'masterKey',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
export const umami = [{
name: 'postgresqlUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'postgresqlPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'postgresqlDatabase',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'umamiAdminPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'hashSalt',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
export const hasura = [{
name: 'postgresqlUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'postgresqlPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'postgresqlDatabase',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'graphQLAdminPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
export const fider = [{
name: 'jwtSecret',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
}, {
name: 'postgreslUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'postgresqlPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'emailNoreply',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailSmtpHost',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailSmtpPassword',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'emailSmtpPort',
isEditable: true,
isLowerCase: false,
isNumber: true,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailSmtpUser',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailSmtpEnableStartTls',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: true,
isEncrypted: false
},
{
name: 'emailMailgunApiKey',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'emailMailgunDomain',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailMailgunRegion',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
}]
export const moodle = [{
name: 'defaultEmail',
isEditable: true,
isLowerCase: true,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'defaultUsername',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'defaultPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mariadbUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mariadbPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mariadbRootUser',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'mariadbRootUserPassword',
isEditable: false,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'mariadbDatabase',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
}]
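The arrays above are plain per-field descriptors: each entry tells the service layer whether a stored value may be edited, whether it should be lower-cased or coerced to a number/boolean, and whether it must be encrypted at rest. Below is a minimal sketch of how such a descriptor could be applied when saving a value; the ServiceField interface and normalizeServiceValue helper are illustrative assumptions, and encrypt stands in for the helper exported from lib/common elsewhere in this changeset (its exact signature is assumed here).
interface ServiceField {
    name: string;
    isEditable: boolean;
    isLowerCase: boolean;
    isNumber: boolean;
    isBoolean: boolean;
    isEncrypted: boolean;
}
// Assumed signature; the real helper lives in lib/common.
declare function encrypt(value: string): string;
// Hypothetical helper: coerce and protect a submitted value according to its descriptor.
export function normalizeServiceValue(field: ServiceField, raw: string): string | number | boolean {
    if (!field.isEditable) throw new Error(`${field.name} is not editable`);
    let value: string | number | boolean = field.isLowerCase ? raw.toLowerCase() : raw;
    if (field.isNumber) value = Number(value);
    if (field.isBoolean) value = raw === 'true';
    if (field.isEncrypted && typeof value === 'string') value = encrypt(value);
    return value;
}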

View File

@@ -0,0 +1,34 @@
import fp from 'fastify-plugin'
import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt'
declare module "@fastify/jwt" {
interface FastifyJWT {
user: {
userId: string,
teamId: string,
permission: string,
isAdmin: boolean
}
}
}
export default fp<FastifyJWTOptions>(async (fastify, opts) => {
fastify.register(fastifyJwt, {
secret: fastify.config.COOLIFY_SECRET_KEY
})
fastify.decorate("authenticate", async function (request, reply) {
try {
await request.jwtVerify()
} catch (err) {
console.log(err)
reply.send(err)
}
})
})
declare module 'fastify' {
export interface FastifyInstance {
authenticate(): Promise<void>
}
}
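The plugin above both verifies tokens signed with the shared COOLIFY_SECRET_KEY and decorates the instance with an authenticate helper. A minimal sketch of a route plugin reusing that decorator (instead of calling request.jwtVerify() inline, as the application routes further down do); the route path and handler are illustrative only and assume this plugin is registered on the same fastify instance.
import { FastifyPluginAsync } from 'fastify';
const protectedRoutes: FastifyPluginAsync = async (fastify) => {
    // The authenticate decorator runs jwtVerify and replies with the error on failure.
    fastify.get('/me', { onRequest: [fastify.authenticate] }, async (request) => {
        // request.user is typed by the FastifyJWT declaration above.
        const { userId, teamId, isAdmin } = request.user;
        return { userId, teamId, isAdmin };
    });
};
export default protectedRoutes;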

View File

@@ -0,0 +1,909 @@
import cuid from 'cuid';
import crypto from 'node:crypto'
import jsonwebtoken from 'jsonwebtoken';
import axios from 'axios';
import { FastifyReply } from 'fastify';
import { day } from '../../../../lib/dayjs';
import { setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
import { asyncExecShell, checkDomainsIsValidInDNS, checkDoubleBranch, decrypt, encrypt, errorHandler, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, prisma, stopBuild, uniqueName } from '../../../../lib/common';
import { checkContainer, dockerInstance, getEngine, isContainerExited, removeContainer } from '../../../../lib/docker';
import { scheduler } from '../../../../lib/scheduler';
import type { FastifyRequest } from 'fastify';
import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication } from './types';
import { OnlyId } from '../../../../types';
export async function listApplications(request: FastifyRequest) {
try {
const { teamId } = request.user
const applications = await prisma.application.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { teams: true }
});
const settings = await prisma.setting.findFirst()
return {
applications,
settings
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getImages(request: FastifyRequest<GetImages>) {
try {
const { buildPack, deploymentType } = request.body
let publishDirectory = undefined;
let port = undefined
const { baseImage, baseBuildImage, baseBuildImages, baseImages, } = setDefaultBaseImage(
buildPack, deploymentType
);
if (buildPack === 'nextjs') {
if (deploymentType === 'static') {
publishDirectory = 'out'
port = '80'
} else {
publishDirectory = ''
port = '3000'
}
}
if (buildPack === 'nuxtjs') {
if (deploymentType === 'static') {
publishDirectory = 'dist'
port = '80'
} else {
publishDirectory = ''
port = '3000'
}
}
return { baseImage, baseBuildImage, baseBuildImages, baseImages, publishDirectory, port }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getApplication(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const { teamId } = request.user
const appId = process.env['COOLIFY_APP_ID'];
let isRunning = false;
let isExited = false;
const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId && application.destinationDocker?.engine) {
isRunning = await checkContainer(application.destinationDocker.engine, id);
isExited = await isContainerExited(application.destinationDocker.engine, id);
}
return {
isQueueActive: scheduler.workers.has('deployApplication'),
isRunning,
isExited,
application,
appId
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function newApplication(request: FastifyRequest, reply: FastifyReply) {
try {
const name = uniqueName();
const { teamId } = request.user
const { id } = await prisma.application.create({
data: {
name,
teams: { connect: { id: teamId } },
settings: { create: { debug: false, previews: false } }
}
});
return reply.code(201).send({ id });
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
function decryptApplication(application: any) {
if (application) {
if (application?.gitSource?.githubApp?.clientSecret) {
application.gitSource.githubApp.clientSecret = decrypt(application.gitSource.githubApp.clientSecret) || null;
}
if (application?.gitSource?.githubApp?.webhookSecret) {
application.gitSource.githubApp.webhookSecret = decrypt(application.gitSource.githubApp.webhookSecret) || null;
}
if (application?.gitSource?.githubApp?.privateKey) {
application.gitSource.githubApp.privateKey = decrypt(application.gitSource.githubApp.privateKey) || null;
}
if (application?.gitSource?.gitlabApp?.appSecret) {
application.gitSource.gitlabApp.appSecret = decrypt(application.gitSource.gitlabApp.appSecret) || null;
}
if (application?.secrets.length > 0) {
application.secrets = application.secrets.map((s: any) => {
s.value = decrypt(s.value) || null
return s;
});
}
return application;
}
}
export async function getApplicationFromDB(id: string, teamId: string) {
try {
let application = await prisma.application.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: {
destinationDocker: true,
settings: true,
gitSource: { include: { githubApp: true, gitlabApp: true } },
secrets: true,
persistentStorage: true
}
});
if (!application) {
throw { status: 404, message: 'Application not found.' };
}
application = decryptApplication(application);
const buildPack = application?.buildPack || null;
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
buildPack
);
// Set default build images
if (!application.baseImage) {
application.baseImage = baseImage;
}
if (!application.baseBuildImage) {
application.baseBuildImage = baseBuildImage;
}
return { ...application, baseBuildImages, baseImages };
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getApplicationFromDBWebhook(projectId: number, branch: string) {
try {
let application = await prisma.application.findFirst({
where: { projectId, branch, settings: { autodeploy: true } },
include: {
destinationDocker: true,
settings: true,
gitSource: { include: { githubApp: true, gitlabApp: true } },
secrets: true,
persistentStorage: true
}
});
if (!application) {
throw { status: 500, message: 'Application not configured.' }
}
application = decryptApplication(application);
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
application.buildPack
);
// Set default build images
if (!application.baseImage) {
application.baseImage = baseImage;
}
if (!application.baseBuildImage) {
application.baseBuildImage = baseBuildImage;
}
return { ...application, baseBuildImages, baseImages };
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveApplication(request: FastifyRequest<SaveApplication>, reply: FastifyReply) {
try {
const { id } = request.params
let {
name,
buildPack,
fqdn,
port,
exposePort,
installCommand,
buildCommand,
startCommand,
baseDirectory,
publishDirectory,
pythonWSGI,
pythonModule,
pythonVariable,
dockerFileLocation,
denoMainFile,
denoOptions,
baseImage,
baseBuildImage,
deploymentType
} = request.body
if (port) port = Number(port);
if (exposePort) {
exposePort = Number(exposePort);
}
if (denoOptions) denoOptions = denoOptions.trim();
const defaultConfiguration = await setDefaultConfiguration({
buildPack,
port,
installCommand,
startCommand,
buildCommand,
publishDirectory,
baseDirectory,
dockerFileLocation,
denoMainFile
});
await prisma.application.update({
where: { id },
data: {
name,
fqdn,
exposePort,
pythonWSGI,
pythonModule,
pythonVariable,
denoOptions,
baseImage,
baseBuildImage,
deploymentType,
...defaultConfiguration
}
});
return reply.code(201).send();
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveApplicationSettings(request: FastifyRequest<SaveApplicationSettings>, reply: FastifyReply) {
try {
const { id } = request.params
const { debug, previews, dualCerts, autodeploy, branch, projectId } = request.body
const isDouble = await checkDoubleBranch(branch, projectId);
if (isDouble && autodeploy) {
await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false } })
throw { status: 500, message: 'Cannot activate automatic deployments while more than one application is configured for this repository / branch.' }
}
await prisma.application.update({
where: { id },
data: { settings: { update: { debug, previews, dualCerts, autodeploy } } },
include: { destinationDocker: true }
});
return reply.code(201).send();
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function stopApplication(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const { id } = request.params
const { teamId } = request.user
const application: any = await getApplicationFromDB(id, teamId);
if (application?.destinationDockerId && application.destinationDocker?.engine) {
const { engine } = application.destinationDocker;
const found = await checkContainer(engine, id);
if (found) {
await removeContainer({ id, engine });
}
}
return reply.code(201).send();
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function deleteApplication(request: FastifyRequest<DeleteApplication>, reply: FastifyReply) {
try {
const { id } = request.params
const { teamId } = request.user
const application = await prisma.application.findUnique({
where: { id },
include: { destinationDocker: true }
});
if (application?.destinationDockerId && application.destinationDocker?.engine && application.destinationDocker?.network) {
const host = getEngine(application.destinationDocker.engine);
const { stdout: containers } = await asyncExecShell(
`DOCKER_HOST=${host} docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
);
if (containers) {
const containersArray = containers.trim().split('\n');
for (const container of containersArray) {
const containerObj = JSON.parse(container);
const id = containerObj.ID;
await removeContainer({ id, engine: application.destinationDocker.engine });
}
}
}
await prisma.applicationSettings.deleteMany({ where: { application: { id } } });
await prisma.buildLog.deleteMany({ where: { applicationId: id } });
await prisma.build.deleteMany({ where: { applicationId: id } });
await prisma.secret.deleteMany({ where: { applicationId: id } });
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
if (teamId === '0') {
await prisma.application.deleteMany({ where: { id } });
} else {
await prisma.application.deleteMany({ where: { id, teams: { some: { id: teamId } } } });
}
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function checkDNS(request: FastifyRequest<CheckDNS>) {
try {
const { id } = request.params
let { exposePort, fqdn, forceSave, dualCerts } = request.body
fqdn = fqdn.toLowerCase();
const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
const found = await isDomainConfigured({ id, fqdn });
if (found) {
throw { status: 500, message: `Domain ${getDomain(fqdn).replace('www.', '')} is already in use!` }
}
if (exposePort) {
exposePort = Number(exposePort);
if (exposePort < 1024 || exposePort > 65535) {
throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` }
}
const { default: getPort } = await import('get-port');
const publicPort = await getPort({ port: exposePort });
if (publicPort !== exposePort) {
throw { status: 500, message: `Port ${exposePort} is already in use.` }
}
}
if (isDNSCheckEnabled && !isDev && !forceSave) {
return await checkDomainsIsValidInDNS({ hostname: request.hostname.split(':')[0], fqdn, dualCerts });
}
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getUsage(request) {
try {
const { id } = request.params
const teamId = request.user?.teamId;
let usage = {};
const application: any = await getApplicationFromDB(id, teamId);
if (application.destinationDockerId) {
[usage] = await Promise.all([getContainerUsage(application.destinationDocker.engine, id)]);
}
return {
usage
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function deployApplication(request: FastifyRequest<DeployApplication>) {
try {
const { id } = request.params
const teamId = request.user?.teamId;
const { pullmergeRequestId = null, branch } = request.body
const buildId = cuid();
const application = await getApplicationFromDB(id, teamId);
if (application) {
if (!application?.configHash) {
const configHash = crypto.createHash('sha256')
.update(
JSON.stringify({
buildPack: application.buildPack,
port: application.port,
exposePort: application.exposePort,
installCommand: application.installCommand,
buildCommand: application.buildCommand,
startCommand: application.startCommand
})
)
.digest('hex');
await prisma.application.update({ where: { id }, data: { configHash } });
}
await prisma.application.update({ where: { id }, data: { updatedAt: new Date() } });
await prisma.build.create({
data: {
id: buildId,
applicationId: id,
branch: application.branch,
destinationDockerId: application.destinationDocker?.id,
gitSourceId: application.gitSource?.id,
githubAppId: application.gitSource?.githubApp?.id,
gitlabAppId: application.gitSource?.gitlabApp?.id,
status: 'queued',
type: 'manual'
}
});
if (pullmergeRequestId) {
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'manual',
...application,
sourceBranch: branch,
pullmergeRequestId
});
} else {
scheduler.workers.get('deployApplication').postMessage({
build_id: buildId,
type: 'manual',
...application
});
}
return {
buildId
};
}
throw { status: 500, message: 'Application not found!' }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveApplicationSource(request: FastifyRequest<SaveApplicationSource>, reply: FastifyReply) {
try {
const { id } = request.params
const { gitSourceId } = request.body
await prisma.application.update({
where: { id },
data: { gitSource: { connect: { id: gitSourceId } } }
});
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getGitHubToken(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const { id } = request.params
const { teamId } = request.user
const application: any = await getApplicationFromDB(id, teamId);
const payload = {
iat: Math.round(new Date().getTime() / 1000),
exp: Math.round(new Date().getTime() / 1000 + 60),
iss: application.gitSource.githubApp.appId
};
const githubToken = jsonwebtoken.sign(payload, application.gitSource.githubApp.privateKey, {
algorithm: 'RS256'
});
const { data } = await axios.post(`${application.gitSource.apiUrl}/app/installations/${application.gitSource.githubApp.installationId}/access_tokens`, {}, {
headers: {
Authorization: `Bearer ${githubToken}`
}
})
return reply.code(201).send({
token: data.token
})
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function checkRepository(request: FastifyRequest<CheckRepository>) {
try {
const { id } = request.params
const { repository, branch } = request.query
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: true }
});
const found = await prisma.application.findFirst({
where: { branch, repository, gitSource: { type: application.gitSource.type }, id: { not: id } }
});
return {
used: found ? true : false
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveRepository(request, reply) {
try {
const { id } = request.params
let { repository, branch, projectId, autodeploy, webhookToken } = request.body
repository = repository.toLowerCase();
branch = branch.toLowerCase();
projectId = Number(projectId);
if (webhookToken) {
await prisma.application.update({
where: { id },
data: { repository, branch, projectId, gitSource: { update: { gitlabApp: { update: { webhookToken: webhookToken ? webhookToken : undefined } } } }, settings: { update: { autodeploy } } }
});
} else {
await prisma.application.update({
where: { id },
data: { repository, branch, projectId, settings: { update: { autodeploy } } }
});
}
const isDouble = await checkDoubleBranch(branch, projectId);
if (isDouble) {
await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false } })
}
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDestination(request: FastifyRequest<SaveDestination>, reply: FastifyReply) {
try {
const { id } = request.params
const { destinationId } = request.body
await prisma.application.update({
where: { id },
data: { destinationDocker: { connect: { id: destinationId } } }
});
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getBuildPack(request) {
try {
const { id } = request.params
const teamId = request.user?.teamId;
const application: any = await getApplicationFromDB(id, teamId);
return {
type: application.gitSource.type,
projectId: application.projectId,
repository: application.repository,
branch: application.branch,
apiUrl: application.gitSource.apiUrl
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveBuildPack(request, reply) {
try {
const { id } = request.params
const { buildPack } = request.body
await prisma.application.update({ where: { id }, data: { buildPack } });
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getSecrets(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
let secrets = await prisma.secret.findMany({
where: { applicationId: id },
orderBy: { createdAt: 'desc' }
});
secrets = secrets.map((secret) => {
secret.value = decrypt(secret.value);
return secret;
});
secrets = secrets.filter((secret) => !secret.isPRMRSecret).sort((a, b) => {
return ('' + a.name).localeCompare(b.name);
})
return {
secrets
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
try {
const { id } = request.params
let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body
if (isNew) {
const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } });
if (found) {
throw { status: 500, message: `Secret ${name} already exists.` }
} else {
value = encrypt(value);
await prisma.secret.create({
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
});
}
} else {
value = encrypt(value);
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
if (found) {
await prisma.secret.updateMany({
where: { applicationId: id, name, isPRMRSecret },
data: { value, isBuildSecret, isPRMRSecret }
});
} else {
await prisma.secret.create({
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
});
}
}
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function deleteSecret(request: FastifyRequest<DeleteSecret>) {
try {
const { id } = request.params
const { name } = request.body
await prisma.secret.deleteMany({ where: { applicationId: id, name } });
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getStorages(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const persistentStorages = await prisma.applicationPersistentStorage.findMany({ where: { applicationId: id } });
return {
persistentStorages
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveStorage(request: FastifyRequest<SaveStorage>, reply: FastifyReply) {
try {
const { id } = request.params
const { path, newStorage, storageId } = request.body
if (newStorage) {
await prisma.applicationPersistentStorage.create({
data: { path, application: { connect: { id } } }
});
} else {
await prisma.applicationPersistentStorage.update({
where: { id: storageId },
data: { path }
});
}
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function deleteStorage(request: FastifyRequest<DeleteStorage>) {
try {
const { id } = request.params
const { path } = request.body
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id, path } });
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getPreviews(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const { teamId } = request.user
let secrets = await prisma.secret.findMany({
where: { applicationId: id },
orderBy: { createdAt: 'desc' }
});
secrets = secrets.map((secret) => {
secret.value = decrypt(secret.value);
return secret;
});
const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret);
const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret);
const destinationDocker = await prisma.destinationDocker.findFirst({
where: { application: { some: { id } }, teams: { some: { id: teamId } } }
});
const docker = dockerInstance({ destinationDocker });
const listContainers = await docker.engine.listContainers({
filters: { network: [destinationDocker.network], name: [id] }
});
const containers = listContainers.filter((container) => {
return (
container.Labels['coolify.configuration'] &&
container.Labels['coolify.type'] === 'standalone-application'
);
});
const jsonContainers = containers
.map((container) =>
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
)
.filter((container) => {
return container.pullmergeRequestId && container.applicationId === id;
});
return {
containers: jsonContainers,
applicationSecrets: applicationSecrets.sort((a, b) => {
return ('' + a.name).localeCompare(b.name);
}),
PRMRSecrets: PRMRSecrets.sort((a, b) => {
return ('' + a.name).localeCompare(b.name);
})
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getApplicationLogs(request: FastifyRequest<GetApplicationLogs>) {
try {
const { id } = request.params
let { since = 0 } = request.query
if (since !== 0) {
since = day(since).unix();
}
const { destinationDockerId, destinationDocker } = await prisma.application.findUnique({
where: { id },
include: { destinationDocker: true }
});
if (destinationDockerId) {
const docker = dockerInstance({ destinationDocker });
try {
const container = await docker.engine.getContainer(id);
if (container) {
const { default: ansi } = await import('strip-ansi')
const logs = (
await container.logs({
stdout: true,
stderr: true,
timestamps: true,
since,
tail: 5000
})
)
.toString()
.split('\n')
.map((l) => ansi(l.slice(8)))
.filter((a) => a);
return {
logs
};
}
} catch (error) {
return {
logs: []
};
}
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
try {
const { id } = request.params
let { buildId, skip = 0 } = request.query
if (typeof skip !== 'number') {
skip = Number(skip)
}
let builds = [];
const buildCount = await prisma.build.count({ where: { applicationId: id } });
if (buildId) {
builds = await prisma.build.findMany({ where: { applicationId: id, id: buildId } });
} else {
builds = await prisma.build.findMany({
where: { applicationId: id },
orderBy: { createdAt: 'desc' },
take: 5,
skip
});
}
builds = builds.map((build) => {
const updatedAt = day(build.updatedAt).utc();
build.took = updatedAt.diff(day(build.createdAt)) / 1000;
build.since = updatedAt.fromNow();
return build;
});
return {
builds,
buildCount
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getBuildIdLogs(request: FastifyRequest<GetBuildIdLogs>) {
try {
const { buildId } = request.params
let { sequence = 0 } = request.query
if (typeof sequence !== 'number') {
sequence = Number(sequence)
}
let logs = await prisma.buildLog.findMany({
where: { buildId, time: { gt: sequence } },
orderBy: { time: 'asc' }
});
const data = await prisma.build.findFirst({ where: { id: buildId } });
return {
logs,
status: data?.status || 'queued'
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getGitLabSSHKey(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: { include: { gitlabApp: true } } }
});
return { publicKey: application.gitSource.gitlabApp.publicSshKey };
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveGitLabSSHKey(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
try {
const { id } = request.params
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: { include: { gitlabApp: true } } }
});
if (!application.gitSource?.gitlabApp?.privateSshKey) {
const keys = await generateSshKeyPair();
const encryptedPrivateKey = encrypt(keys.privateKey);
await prisma.gitlabApp.update({
where: { id: application.gitSource.gitlabApp.id },
data: { privateSshKey: encryptedPrivateKey, publicSshKey: keys.publicKey }
});
return reply.code(201).send({ publicKey: keys.publicKey })
}
return { message: 'SSH key already exists' }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDeployKey(request: FastifyRequest<SaveDeployKey>, reply: FastifyReply) {
try {
const { id } = request.params
let { deployKeyId } = request.body;
deployKeyId = Number(deployKeyId);
const application = await prisma.application.findUnique({
where: { id },
include: { gitSource: { include: { gitlabApp: true } } }
});
await prisma.gitlabApp.update({
where: { id: application.gitSource.gitlabApp.id },
data: { deployKeyId }
});
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function cancelDeployment(request: FastifyRequest<CancelDeployment>, reply: FastifyReply) {
try {
const { buildId, applicationId } = request.body;
if (!buildId) {
throw { status: 500, message: 'buildId is required' }
}
await stopBuild(buildId, applicationId);
return reply.code(201).send()
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
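Every handler in this file follows the same error convention: expected failures are thrown as plain { status, message } objects, the catch clause destructures them, and errorHandler turns them into the HTTP response. A minimal sketch of a new handler written to that convention; restartApplication and its behaviour are hypothetical and not part of this changeset.
export async function restartApplication(request: FastifyRequest<OnlyId>) {
    try {
        const { id } = request.params;
        const { teamId } = request.user;
        const application: any = await getApplicationFromDB(id, teamId);
        if (!application?.destinationDockerId) {
            // Thrown objects carry the status/message pair the catch clause expects.
            throw { status: 500, message: 'Application has no destination configured.' };
        }
        // ...restart logic would go here...
        return {};
    } catch ({ status, message }) {
        return errorHandler({ status, message });
    }
}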

View File

@@ -0,0 +1,60 @@
import { FastifyPluginAsync } from 'fastify';
import { OnlyId } from '../../../../types';
import { cancelDeployment, checkDNS, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication } from './handlers';
import type { CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, GetImages, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.addHook('onRequest', async (request) => {
return await request.jwtVerify()
})
fastify.get('/', async (request) => await listApplications(request));
fastify.post<GetImages>('/images', async (request) => await getImages(request));
fastify.post('/new', async (request, reply) => await newApplication(request, reply));
fastify.get<OnlyId>('/:id', async (request) => await getApplication(request));
fastify.post<SaveApplication>('/:id', async (request, reply) => await saveApplication(request, reply));
fastify.delete<DeleteApplication>('/:id', async (request, reply) => await deleteApplication(request, reply));
fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
fastify.post<SaveApplicationSettings>('/:id/settings', async (request, reply) => await saveApplicationSettings(request, reply));
fastify.post<CheckDNS>('/:id/check', async (request) => await checkDNS(request));
fastify.get<OnlyId>('/:id/secrets', async (request) => await getSecrets(request));
fastify.post<SaveSecret>('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
fastify.delete<DeleteSecret>('/:id/secrets', async (request) => await deleteSecret(request));
fastify.get<OnlyId>('/:id/storages', async (request) => await getStorages(request));
fastify.post<SaveStorage>('/:id/storages', async (request, reply) => await saveStorage(request, reply));
fastify.delete<DeleteStorage>('/:id/storages', async (request) => await deleteStorage(request));
fastify.get<OnlyId>('/:id/previews', async (request) => await getPreviews(request));
fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
fastify.get<GetBuildLogs>('/:id/logs/build', async (request) => await getBuildLogs(request));
fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
fastify.get('/:id/usage', async (request) => await getUsage(request))
fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));
fastify.post<SaveApplicationSource>('/:id/configuration/source', async (request, reply) => await saveApplicationSource(request, reply));
fastify.get<CheckRepository>('/:id/configuration/repository', async (request) => await checkRepository(request));
fastify.post('/:id/configuration/repository', async (request, reply) => await saveRepository(request, reply));
fastify.post<SaveDestination>('/:id/configuration/destination', async (request, reply) => await saveDestination(request, reply));
fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));
fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
fastify.post<OnlyId>('/:id/configuration/sshkey', async (request, reply) => await saveGitLabSSHKey(request, reply));
fastify.post<SaveDeployKey>('/:id/configuration/deploykey', async (request, reply) => await saveDeployKey(request, reply));
fastify.get<OnlyId>('/:id/configuration/githubToken', async (request, reply) => await getGitHubToken(request, reply));
};
export default root;
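With the onRequest hook above, every route requires a valid JWT. Here is a minimal client-side sketch that triggers a deployment and reads back the build logs via the routes defined in this plugin; the base URL, token handling, and any /api/v1/applications-style prefix are assumptions, since the plugin's mount point is not visible in this diff.
async function deployAndFetchLogs(baseUrl: string, token: string, applicationId: string, branch: string) {
    const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' };
    // POST /:id/deploy returns { buildId } (see deployApplication in ./handlers).
    const deployResponse = await fetch(`${baseUrl}/${applicationId}/deploy`, {
        method: 'POST',
        headers,
        body: JSON.stringify({ pullmergeRequestId: null, branch })
    });
    const { buildId } = await deployResponse.json();
    // GET /:id/logs/build/:buildId returns { logs, status }.
    const logsResponse = await fetch(`${baseUrl}/${applicationId}/logs/build/${buildId}?sequence=0`, { headers });
    const { logs, status } = await logsResponse.json();
    return { buildId, status, logs };
}
For example, deployAndFetchLogs could be pointed at whatever URL this plugin is mounted on to queue a manual build and return its initial log lines.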

View File

@@ -0,0 +1,117 @@
import type { OnlyId } from "../../../../types";
export interface SaveApplication extends OnlyId {
Body: {
name: string,
buildPack: string,
fqdn: string,
port: number,
exposePort: number,
installCommand: string,
buildCommand: string,
startCommand: string,
baseDirectory: string,
publishDirectory: string,
pythonWSGI: string,
pythonModule: string,
pythonVariable: string,
dockerFileLocation: string,
denoMainFile: string,
denoOptions: string,
baseImage: string,
baseBuildImage: string,
deploymentType: string
}
}
export interface SaveApplicationSettings extends OnlyId {
Querystring: { domain: string; };
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; };
}
export interface DeleteApplication extends OnlyId {
Querystring: { domain: string; };
}
export interface CheckDNS extends OnlyId {
Querystring: { domain: string; };
Body: {
exposePort: number,
fqdn: string,
forceSave: boolean,
dualCerts: boolean
}
}
export interface DeployApplication {
Querystring: { domain: string }
Body: { pullmergeRequestId: string | null, branch: string }
}
export interface GetImages {
Body: { buildPack: string, deploymentType: string }
}
export interface SaveApplicationSource extends OnlyId {
Body: { gitSourceId: string }
}
export interface CheckRepository extends OnlyId {
Querystring: { repository: string, branch: string }
}
export interface SaveDestination extends OnlyId {
Body: { destinationId: string }
}
export interface SaveSecret extends OnlyId {
Body: {
name: string,
value: string,
isBuildSecret: boolean,
isPRMRSecret: boolean,
isNew: boolean
}
}
export interface DeleteSecret extends OnlyId {
Body: { name: string }
}
export interface SaveStorage extends OnlyId {
Body: {
path: string,
newStorage: boolean,
storageId: string
}
}
export interface DeleteStorage extends OnlyId {
Body: {
path: string,
}
}
export interface GetApplicationLogs extends OnlyId {
Querystring: {
since: number,
}
}
export interface GetBuildLogs extends OnlyId {
Querystring: {
buildId: string
skip: number,
}
}
export interface GetBuildIdLogs {
Params: {
buildId: string
},
Querystring: {
sequence: number
}
}
export interface SaveDeployKey extends OnlyId {
Body: {
deployKeyId: number
}
}
export interface CancelDeployment {
Body: {
buildId: string,
applicationId: string
}
}
export interface DeployApplication extends OnlyId {
Body: {
pullmergeRequestId: string | null,
branch: string
}
}
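These interfaces are consumed by passing them as the generic parameter to FastifyRequest, which types params (via OnlyId) as well as the body or querystring for each route handler. A minimal sketch matching the pattern used in ./handlers; the handler name here is illustrative.
import type { FastifyRequest } from 'fastify';
async function exampleSaveSecret(request: FastifyRequest<SaveSecret>) {
    const { id } = request.params; // typed through OnlyId
    const { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body; // typed through SaveSecret['Body']
    return { id, name, isBuildSecret, isPRMRSecret, isNew, hasValue: Boolean(value) };
}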

View File

@@ -0,0 +1,19 @@
import { FastifyPluginAsync } from 'fastify';
import { errorHandler, version } from '../../../../lib/common';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get('/', async () => {
try {
return {
version,
whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
});
};
export default root;

View File

@@ -0,0 +1,470 @@
import cuid from 'cuid';
import type { FastifyRequest } from 'fastify';
import { FastifyReply } from 'fastify';
import yaml from 'js-yaml';
import fs from 'fs/promises';
import { asyncExecShell, ComposeFile, createDirectories, decrypt, encrypt, errorHandler, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePort, listSettings, makeLabelForStandaloneDatabase, prisma, startTcpProxy, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
import { dockerInstance, getEngine } from '../../../../lib/docker';
import { day } from '../../../../lib/dayjs';
import { GetDatabaseLogs, OnlyId, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSettings, SaveVersion } from '../../../../types';
import { SaveDatabaseType } from './types';
export async function listDatabases(request: FastifyRequest) {
try {
const teamId = request.user.teamId;
let databases = []
if (teamId === '0') {
databases = await prisma.database.findMany({ include: { teams: true } });
} else {
databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId } } },
include: { teams: true }
});
}
return {
databases
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function newDatabase(request: FastifyRequest, reply: FastifyReply) {
try {
const teamId = request.user.teamId;
const name = uniqueName();
const dbUser = cuid();
const dbUserPassword = encrypt(generatePassword());
const rootUser = cuid();
const rootUserPassword = encrypt(generatePassword());
const defaultDatabase = cuid();
const { id } = await prisma.database.create({
data: {
name,
defaultDatabase,
dbUser,
dbUserPassword,
rootUser,
rootUserPassword,
teams: { connect: { id: teamId } },
settings: { create: { isPublic: false } }
}
});
return reply.code(201).send({ id })
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getDatabase(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params;
const teamId = request.user.teamId;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (!database) {
throw { status: 404, message: 'Database not found.' }
}
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
const { destinationDockerId, destinationDocker } = database;
let isRunning = false;
if (destinationDockerId) {
const host = getEngine(destinationDocker.engine);
try {
const { stdout } = await asyncExecShell(
`DOCKER_HOST=${host} docker inspect --format '{{json .State}}' ${id}`
);
if (JSON.parse(stdout).Running) {
isRunning = true;
}
} catch (error) {
//
}
}
const configuration = generateDatabaseConfiguration(database);
const settings = await listSettings();
return {
privatePort: configuration?.privatePort,
database,
isRunning,
versions: await getDatabaseVersions(database.type),
settings
};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getDatabaseTypes(request: FastifyRequest) {
try {
return {
types: supportedDatabaseTypesAndVersions
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDatabaseType(request: FastifyRequest<SaveDatabaseType>, reply: FastifyReply) {
try {
const { id } = request.params;
const { type } = request.body;
await prisma.database.update({
where: { id },
data: { type }
});
return reply.code(201).send({})
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getVersions(request: FastifyRequest<OnlyId>) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
const { type } = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
return {
versions: supportedDatabaseTypesAndVersions.find((name) => name.name === type).versions
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveVersion(request: FastifyRequest<SaveVersion>, reply: FastifyReply) {
try {
const { id } = request.params;
const { version } = request.body;
await prisma.database.update({
where: { id },
data: {
version,
}
});
return reply.code(201).send({})
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDatabaseDestination(request: FastifyRequest<SaveDatabaseDestination>, reply: FastifyReply) {
try {
const { id } = request.params;
const { destinationId } = request.body;
await prisma.database.update({
where: { id },
data: { destinationDocker: { connect: { id: destinationId } } }
});
const {
destinationDockerId,
destinationDocker: { engine },
version,
type
} = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } });
if (destinationDockerId) {
const host = getEngine(engine);
if (type && version) {
const baseImage = getDatabaseImage(type);
asyncExecShell(`DOCKER_HOST=${host} docker pull ${baseImage}:${version}`);
}
}
return reply.code(201).send({})
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getDatabaseUsage(request: FastifyRequest<OnlyId>) {
try {
const { id } = request.params;
const teamId = request.user.teamId;
let usage = {};
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
if (database.destinationDockerId) {
[usage] = await Promise.all([getContainerUsage(database.destinationDocker.engine, id)]);
}
return {
usage
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function startDatabase(request: FastifyRequest<OnlyId>) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
const {
type,
destinationDockerId,
destinationDocker,
publicPort,
settings: { isPublic }
} = database;
const { privatePort, environmentVariables, image, volume, ulimits } =
generateDatabaseConfiguration(database);
const network = destinationDockerId && destinationDocker.network;
const host = getEngine(destinationDocker.engine);
const volumeName = volume.split(':')[0];
const labels = await makeLabelForStandaloneDatabase({ id, image, volume });
const { workdir } = await createDirectories({ repository: type, buildId: id });
const composeFile: ComposeFile = {
version: '3.8',
services: {
[id]: {
container_name: id,
image,
networks: [network],
environment: environmentVariables,
volumes: [volume],
ulimits,
labels,
restart: 'always',
deploy: {
restart_policy: {
condition: 'on-failure',
delay: '5s',
max_attempts: 3,
window: '120s'
}
}
}
},
networks: {
[network]: {
external: true
}
},
volumes: {
[volumeName]: {
external: true
}
}
};
const composeFileDestination = `${workdir}/docker-compose.yaml`;
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
try {
await asyncExecShell(`DOCKER_HOST=${host} docker volume create ${volumeName}`);
} catch (error) {
console.log(error);
}
try {
await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} up -d`);
if (isPublic) await startTcpProxy(destinationDocker, id, publicPort, privatePort);
return {};
} catch (error) {
throw {
error
};
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function stopDatabase(request: FastifyRequest<OnlyId>) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
const everStarted = await stopDatabaseContainer(database);
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
await prisma.database.update({
where: { id },
data: {
settings: { upsert: { update: { isPublic: false }, create: { isPublic: false } } }
}
});
await prisma.database.update({ where: { id }, data: { publicPort: null } });
return {};
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function getDatabaseLogs(request: FastifyRequest<GetDatabaseLogs>) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
let { since = 0 } = request.query
if (since !== 0) {
since = day(since).unix();
}
const { destinationDockerId, destinationDocker } = await prisma.database.findUnique({
where: { id },
include: { destinationDocker: true }
});
if (destinationDockerId) {
const docker = dockerInstance({ destinationDocker });
try {
const container = await docker.engine.getContainer(id);
if (container) {
const { default: ansi } = await import('strip-ansi')
const logs = (
await container.logs({
stdout: true,
stderr: true,
timestamps: true,
since,
tail: 5000
})
)
.toString()
.split('\n')
.map((l) => ansi(l.slice(8)))
.filter((a) => a);
return {
logs
};
}
} catch (error) {
const { statusCode } = error;
if (statusCode === 404) {
return {
logs: []
};
}
}
}
return {
message: 'No logs found.'
}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function deleteDatabase(request: FastifyRequest<OnlyId>) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
if (database.destinationDockerId) {
const everStarted = await stopDatabaseContainer(database);
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
}
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
return {}
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDatabase(request: FastifyRequest<SaveDatabase>, reply: FastifyReply) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
const {
name,
defaultDatabase,
dbUser,
dbUserPassword,
rootUser,
rootUserPassword,
version,
isRunning
} = request.body;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
if (isRunning) {
if (database.dbUserPassword !== dbUserPassword) {
await updatePasswordInDb(database, dbUser, dbUserPassword, false);
} else if (database.rootUserPassword !== rootUserPassword) {
await updatePasswordInDb(database, rootUser, rootUserPassword, true);
}
}
const encryptedDbUserPassword = dbUserPassword && encrypt(dbUserPassword);
const encryptedRootUserPassword = rootUserPassword && encrypt(rootUserPassword);
await prisma.database.update({
where: { id },
data: {
name,
defaultDatabase,
dbUser,
dbUserPassword: encryptedDbUserPassword,
rootUser,
rootUserPassword: encryptedRootUserPassword,
version
}
});
return reply.code(201).send({})
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
export async function saveDatabaseSettings(request: FastifyRequest<SaveDatabaseSettings>) {
try {
const teamId = request.user.teamId;
const { id } = request.params;
const { isPublic, appendOnly = true } = request.body;
const publicPort = await getFreePort();
const settings = await listSettings();
await prisma.database.update({
where: { id },
data: {
settings: { upsert: { update: { isPublic, appendOnly }, create: { isPublic, appendOnly } } }
}
});
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
const { destinationDockerId, destinationDocker, publicPort: oldPublicPort } = database;
const { privatePort } = generateDatabaseConfiguration(database);
if (destinationDockerId) {
if (isPublic) {
await prisma.database.update({ where: { id }, data: { publicPort } });
if (settings.isTraefikUsed) {
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
} else {
await startTcpProxy(destinationDocker, id, publicPort, privatePort);
}
} else {
await prisma.database.update({ where: { id }, data: { publicPort: null } });
await stopTcpHttpProxy(id, destinationDocker, oldPublicPort);
}
}
return { publicPort }
} catch ({ status, message }) {
return errorHandler({ status, message })
}
}
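startDatabase and saveDatabaseSettings both destructure the result of generateDatabaseConfiguration (lib/common), whose implementation is not part of this diff. The interface below is inferred from that usage and is an illustrative assumption only, not the library's actual declaration.
interface DatabaseConfiguration {
    privatePort: number;                                    // container-side port proxied when the database is public
    environmentVariables: Record<string, string | number>;  // passed straight into the compose service
    image: string;                                          // e.g. `${baseImage}:${version}`, pulled in saveDatabaseDestination
    volume: string;                                         // "<volumeName>:<mountPath>", split on ':' in startDatabase
    ulimits: Record<string, unknown>;
}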

Some files were not shown because too many files have changed in this diff.