mirror of
https://github.com/ershisan99/coolify.git
synced 2025-12-18 20:59:24 +00:00
Compare commits
252 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c43b848708 | ||
|
|
e22950cecb | ||
|
|
5a7edcb762 | ||
|
|
9b47de71fc | ||
|
|
8f9462245a | ||
|
|
f0ed51cd22 | ||
|
|
99a7eff6ab | ||
|
|
edfed57df3 | ||
|
|
a70e35cb79 | ||
|
|
3da1b31363 | ||
|
|
59bc2dd8a7 | ||
|
|
7b6e7680a6 | ||
|
|
1c65df282e | ||
|
|
77ae070c98 | ||
|
|
20708f1456 | ||
|
|
9a1a67a4ef | ||
|
|
3a8e5df897 | ||
|
|
88a62be30c | ||
|
|
c478c1b7ad | ||
|
|
18c2b2e38e | ||
|
|
b105e6fbf8 | ||
|
|
da11bae67c | ||
|
|
d344a9bb4f | ||
|
|
222adb212b | ||
|
|
cb01bbe4ac | ||
|
|
c63237684a | ||
|
|
792d51d93f | ||
|
|
62bfb5dacc | ||
|
|
d7fa80703d | ||
|
|
52b712d90b | ||
|
|
331e13b7cb | ||
|
|
64bb4a2525 | ||
|
|
31d7e7e806 | ||
|
|
e740788d6c | ||
|
|
928d53e532 | ||
|
|
87ba4560ad | ||
|
|
9137e8bc32 | ||
|
|
35bd2b23d5 | ||
|
|
10a514d9ac | ||
|
|
71096acdff | ||
|
|
07da696397 | ||
|
|
41baf150c2 | ||
|
|
f0a52b2ef4 | ||
|
|
54e83fdff1 | ||
|
|
46327ff2fc | ||
|
|
8b26acc841 | ||
|
|
b90cb5a731 | ||
|
|
cd9b642c5e | ||
|
|
41a928d41b | ||
|
|
1388bee62c | ||
|
|
8ebc778d40 | ||
|
|
3a59091b41 | ||
|
|
e764c4651c | ||
|
|
10f04d2177 | ||
|
|
119f994b50 | ||
|
|
e39541c318 | ||
|
|
cf88885c94 | ||
|
|
1192346ce3 | ||
|
|
0e3bd85847 | ||
|
|
edeb6c6965 | ||
|
|
138fd5cb6d | ||
|
|
155410bd44 | ||
|
|
20bd829c2e | ||
|
|
7b7e222946 | ||
|
|
98d901d06c | ||
|
|
4e862cda6f | ||
|
|
4e940807ae | ||
|
|
b081743f54 | ||
|
|
34bb9f301f | ||
|
|
ed8a6daeea | ||
|
|
9e81ab43ac | ||
|
|
32d94cbe97 | ||
|
|
46a83aa457 | ||
|
|
08d7593ca9 | ||
|
|
a50f7a7cc2 | ||
|
|
2c33447f9f | ||
|
|
d67a3f51ec | ||
|
|
2719974262 | ||
|
|
eb5aebd58d | ||
|
|
98dbf3d8a5 | ||
|
|
d9489a2cb4 | ||
|
|
95832d34f7 | ||
|
|
d3e9aea63d | ||
|
|
d6972e2ed1 | ||
|
|
50844e98be | ||
|
|
5c6fcfebf9 | ||
|
|
84cfe6fb42 | ||
|
|
abf0aeb2a8 | ||
|
|
a7aca0ce8b | ||
|
|
67bb5d973b | ||
|
|
662948d622 | ||
|
|
f5bedfdf7f | ||
|
|
db9db61d92 | ||
|
|
d255cb1973 | ||
|
|
6529271de2 | ||
|
|
0dd32b5319 | ||
|
|
b032da798b | ||
|
|
a1a9f1531e | ||
|
|
f71b54deb2 | ||
|
|
c63430e342 | ||
|
|
6821b128ad | ||
|
|
3f8d44a01c | ||
|
|
3aef04437c | ||
|
|
53e32c038b | ||
|
|
1660510614 | ||
|
|
69f5601b3e | ||
|
|
6e22fecc98 | ||
|
|
d18b2b6a1f | ||
|
|
4b0370ac08 | ||
|
|
750ef80777 | ||
|
|
59c62923be | ||
|
|
68b220d06e | ||
|
|
250ea64203 | ||
|
|
0ab57396d2 | ||
|
|
1e36856e65 | ||
|
|
cfdc8db543 | ||
|
|
1f25bc411f | ||
|
|
972f77c790 | ||
|
|
795f99bb47 | ||
|
|
54f7142b2b | ||
|
|
26eacfc2c0 | ||
|
|
e2bf02841f | ||
|
|
6a59b8d27c | ||
|
|
7fc43ef2bb | ||
|
|
70a3fc247e | ||
|
|
56ab8312f1 | ||
|
|
6fb6a514ac | ||
|
|
b01f5f47b3 | ||
|
|
ebdd3601b3 | ||
|
|
c0d711170b | ||
|
|
da86f0076b | ||
|
|
9d8551a9be | ||
|
|
01ea86479d | ||
|
|
eb62888c39 | ||
|
|
b006fe8f68 | ||
|
|
dc3add495c | ||
|
|
59086e9eb4 | ||
|
|
e563988596 | ||
|
|
5a206a140c | ||
|
|
dbf910ff38 | ||
|
|
35b31dce2b | ||
|
|
1ec620be4b | ||
|
|
8516ac671a | ||
|
|
3b7fdebe8c | ||
|
|
17ac3048ac | ||
|
|
4e43efef50 | ||
|
|
4f4f5b1c01 | ||
|
|
1fa5c5e021 | ||
|
|
436e0e3a2b | ||
|
|
e717c1d599 | ||
|
|
ae5d90eb47 | ||
|
|
c095cb58b3 | ||
|
|
6bba37c36d | ||
|
|
60a428a952 | ||
|
|
c376123877 | ||
|
|
cd3663038f | ||
|
|
16b7c1708b | ||
|
|
3435f92fcb | ||
|
|
cef571b8cc | ||
|
|
242bc61e2d | ||
|
|
c917135bd3 | ||
|
|
3802158ad5 | ||
|
|
e452f68614 | ||
|
|
9586213dd1 | ||
|
|
30781f218c | ||
|
|
697c42ff66 | ||
|
|
37d8f1847c | ||
|
|
2af13fff55 | ||
|
|
51e8ca8de0 | ||
|
|
06228cd2a7 | ||
|
|
0033baafdc | ||
|
|
79dfc6a660 | ||
|
|
972b0fa811 | ||
|
|
ad51a9ebc8 | ||
|
|
51a40d049d | ||
|
|
8b3113bd92 | ||
|
|
d6b6938555 | ||
|
|
ce52608f19 | ||
|
|
ede37d296b | ||
|
|
6374b1284b | ||
|
|
6ac8dd8907 | ||
|
|
24c655d7ef | ||
|
|
1f087cc29a | ||
|
|
c3684a1650 | ||
|
|
a410fd0776 | ||
|
|
271fb1358d | ||
|
|
a4d53a28eb | ||
|
|
e69e32f6c7 | ||
|
|
650409dde3 | ||
|
|
f3f4bb5105 | ||
|
|
9c02af6b52 | ||
|
|
6a3f4ba171 | ||
|
|
6a6426fe6b | ||
|
|
21256746c3 | ||
|
|
c34d643f95 | ||
|
|
0be402af82 | ||
|
|
b5b0b6524d | ||
|
|
22f1a3c908 | ||
|
|
fa5f439858 | ||
|
|
7cc760eecf | ||
|
|
af0652f6b2 | ||
|
|
9e009bebaa | ||
|
|
8e53ae3484 | ||
|
|
7ceb8f1537 | ||
|
|
b0eae8cfe9 | ||
|
|
febef372b8 | ||
|
|
a18e3659aa | ||
|
|
e2e342851a | ||
|
|
bee3292088 | ||
|
|
f56d4dbbb3 | ||
|
|
eccd7c96d7 | ||
|
|
4046c472ed | ||
|
|
0da4a1024a | ||
|
|
aa2f328640 | ||
|
|
4d22b610b6 | ||
|
|
e91c3eab9c | ||
|
|
2e8fd6f0c7 | ||
|
|
90fde24b40 | ||
|
|
02a1f50776 | ||
|
|
57b97a9204 | ||
|
|
1ec03693d3 | ||
|
|
4246d86694 | ||
|
|
2cce1f8459 | ||
|
|
3937cfec53 | ||
|
|
259aeeb67a | ||
|
|
9d53bc0926 | ||
|
|
1211f3c9fd | ||
|
|
c07d6aa702 | ||
|
|
4f662dbf21 | ||
|
|
a4301c5d23 | ||
|
|
86b7824c78 | ||
|
|
435f063c36 | ||
|
|
902a764ff2 | ||
|
|
4097378847 | ||
|
|
5f3567e808 | ||
|
|
7325353ced | ||
|
|
68f5b32876 | ||
|
|
326f0dac1b | ||
|
|
828faaf2b1 | ||
|
|
9582664406 | ||
|
|
ec5474b72b | ||
|
|
62d1011d9f | ||
|
|
0a7ec6bd20 | ||
|
|
b84c37cd8f | ||
|
|
887d65e512 | ||
|
|
3543a9c809 | ||
|
|
f94e17134e | ||
|
|
40cbee0d75 | ||
|
|
c98ed5338a | ||
|
|
27f1e1d7cd | ||
|
|
8f3f9ebade | ||
|
|
1bd33fea98 |
16
.devcontainer/Dockerfile
Normal file
16
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node/.devcontainer/base.Dockerfile
|
||||||
|
|
||||||
|
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
|
||||||
|
ARG VARIANT="16-bullseye"
|
||||||
|
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}
|
||||||
|
|
||||||
|
# [Optional] Uncomment this section to install additional OS packages.
|
||||||
|
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||||
|
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||||
|
|
||||||
|
# [Optional] Uncomment if you want to install an additional version of node using nvm
|
||||||
|
# ARG EXTRA_NODE_VERSION=10
|
||||||
|
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
|
||||||
|
|
||||||
|
# [Optional] Uncomment if you want to install more global node modules
|
||||||
|
RUN su node -c "npm install -g pnpm"
|
||||||
28
.devcontainer/devcontainer.json
Normal file
28
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||||
|
// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/javascript-node
|
||||||
|
{
|
||||||
|
"name": "Node.js",
|
||||||
|
"build": {
|
||||||
|
"dockerfile": "Dockerfile",
|
||||||
|
// Update 'VARIANT' to pick a Node version: 18, 16, 14.
|
||||||
|
// Append -bullseye or -buster to pin to an OS version.
|
||||||
|
// Use -bullseye variants on local arm64/Apple Silicon.
|
||||||
|
"args": {
|
||||||
|
"VARIANT": "16-bullseye"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Set *default* container specific settings.json values on container create.
|
||||||
|
"settings": {},
|
||||||
|
// Add the IDs of extensions you want installed when the container is created.
|
||||||
|
"extensions": ["dbaeumer.vscode-eslint", "svelte.svelte-vscode"],
|
||||||
|
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||||
|
"forwardPorts": [3000],
|
||||||
|
// Use 'postCreateCommand' to run commands after the container is created.
|
||||||
|
"postCreateCommand": "cp .env.template .env && pnpm install && pnpm db:push && pnpm db:seed",
|
||||||
|
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||||
|
"remoteUser": "node",
|
||||||
|
"features": {
|
||||||
|
"docker-in-docker": "20.10",
|
||||||
|
"github-cli": "latest"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,16 +1,12 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
node_modules
|
node_modules
|
||||||
/build
|
build
|
||||||
/.svelte-kit
|
.svelte-kit
|
||||||
/package
|
package
|
||||||
/yarn.lock
|
|
||||||
/.pnpm-store
|
|
||||||
/ssl
|
|
||||||
|
|
||||||
.env
|
.env
|
||||||
.env.prod
|
.env.*
|
||||||
.env.stag
|
!.env.example
|
||||||
/db/*.db
|
dist
|
||||||
/db/*.db-journal
|
client
|
||||||
/data/haproxy/haproxy.cfg
|
apps/api/db/*.db
|
||||||
/data/haproxy/haproxy.cfg.lkg
|
local-serve
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
COOLIFY_APP_ID=
|
|
||||||
COOLIFY_SECRET_KEY=12341234123412341234123412341234
|
|
||||||
COOLIFY_DATABASE_URL=file:../db/dev.db
|
|
||||||
COOLIFY_SENTRY_DSN=
|
|
||||||
COOLIFY_IS_ON=docker
|
|
||||||
COOLIFY_WHITE_LABELED=false
|
|
||||||
COOLIFY_WHITE_LABELED_ICON=
|
|
||||||
COOLIFY_AUTO_UPDATE=false
|
|
||||||
47
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
Normal file
47
.github/ISSUE_TEMPLATE/--bug-report.yaml
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
name: 🐞 Bug report
|
||||||
|
description: Create a bug report to help us improve coolify
|
||||||
|
title: "[Bug]: "
|
||||||
|
labels: [Bug]
|
||||||
|
assignees:
|
||||||
|
- andrasbacsai
|
||||||
|
- vasani-arpit
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
Thanks for taking the time to fill out this bug report! Please fill the form in English
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Is there an existing issue for this?
|
||||||
|
options:
|
||||||
|
- label: I have searched the existing issues
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Description
|
||||||
|
description: A concise description of what you're experiencing and what you expect.
|
||||||
|
placeholder: |
|
||||||
|
When I do <X>, <Y> happens and I see the error message attached below:
|
||||||
|
```...```
|
||||||
|
What I expect is <Z>
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Steps To Reproduce
|
||||||
|
description: Add steps to reproduce this behaviour, include console / network logs & videos
|
||||||
|
placeholder: |
|
||||||
|
1. Go to '...'
|
||||||
|
2. Click on '....'
|
||||||
|
3. Scroll down to '....'
|
||||||
|
4. See error
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: input
|
||||||
|
id: version
|
||||||
|
attributes:
|
||||||
|
label: Version
|
||||||
|
description: "The version of your coolify Instance"
|
||||||
|
placeholder: "2.5.2"
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
31
.github/ISSUE_TEMPLATE/--feature-request.yaml
vendored
Normal file
31
.github/ISSUE_TEMPLATE/--feature-request.yaml
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
name: 🛠️ Feature request
|
||||||
|
description: Suggest an idea to improve coolify
|
||||||
|
title: '[Feature]: '
|
||||||
|
labels: [Enhancement]
|
||||||
|
assignees:
|
||||||
|
- andrasbacsai
|
||||||
|
- vasani-arpit
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
Thanks for taking the time to request a feature for coolify! Please also add your request here to get feedback from the community: https://feedback.coolify.io/!
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Is there an existing issue for this?
|
||||||
|
description: Please search to see if an issue related to this feature request already exists.
|
||||||
|
options:
|
||||||
|
- label: I have searched the existing issues
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Summary
|
||||||
|
description: One paragraph description of the feature.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Why should this be worked on?
|
||||||
|
description: A concise description of the problems or use cases for this feature request.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
20
.github/ISSUE_TEMPLATE/--task.yaml
vendored
Normal file
20
.github/ISSUE_TEMPLATE/--task.yaml
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
name: 📝 Task
|
||||||
|
description: Create a task for the team to work on
|
||||||
|
title: "[Task]: "
|
||||||
|
labels: [Task]
|
||||||
|
body:
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Is there an existing issue for this?
|
||||||
|
description: Please search to see if an issue related to this already exists.
|
||||||
|
options:
|
||||||
|
- label: I have searched the existing issues
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: SubTasks
|
||||||
|
placeholder: |
|
||||||
|
- Sub Task 1
|
||||||
|
- Sub Task 2
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
blank_issues_enabled: true
|
||||||
|
contact_links:
|
||||||
|
- name: 🤔 Questions and Help
|
||||||
|
url: https://discord.com/invite/6rDM4fkymF
|
||||||
|
about: Reach out to us on discord or our github discussions page.
|
||||||
|
- name: 🙋♂️ service request
|
||||||
|
url: https://feedback.coolify.io/
|
||||||
|
about: want to request a new service? for e.g wordpress, hasura, appwrite etc...
|
||||||
39
.github/workflows/github-actions.yml
vendored
Normal file
39
.github/workflows/github-actions.yml
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
name: release-coolify
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: published
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
make-it-coolifyed:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
-
|
||||||
|
name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v1
|
||||||
|
-
|
||||||
|
name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v1
|
||||||
|
-
|
||||||
|
name: Login to DockerHub
|
||||||
|
uses: docker/login-action@v1
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
-
|
||||||
|
name: Get current package version
|
||||||
|
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||||
|
id: package-version
|
||||||
|
-
|
||||||
|
name: Build and push
|
||||||
|
uses: docker/build-push-action@v2
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
|
||||||
|
cache-from: type=registry,ref=coollabsio/coolify:buildcache
|
||||||
|
cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
|
||||||
23
.gitignore
vendored
23
.gitignore
vendored
@@ -1,16 +1,13 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
node_modules
|
node_modules
|
||||||
/build
|
build
|
||||||
/.svelte-kit
|
.svelte-kit
|
||||||
/package
|
package
|
||||||
/yarn.lock
|
|
||||||
/.pnpm-store
|
|
||||||
/ssl
|
|
||||||
|
|
||||||
.env
|
.env
|
||||||
.env.prod
|
.env.*
|
||||||
.env.stag
|
!.env.example
|
||||||
/db/*.db
|
dist
|
||||||
/db/*.db-journal
|
client
|
||||||
/data/haproxy/haproxy.cfg
|
apps/api/db/*.db
|
||||||
/data/haproxy/haproxy.cfg.lkg
|
local-serve
|
||||||
|
apps/api/db/migration.db-journal
|
||||||
12
.gitpod.yml
Normal file
12
.gitpod.yml
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# This configuration file was automatically generated by Gitpod.
|
||||||
|
# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file)
|
||||||
|
# and commit this file to your remote git repository to share the goodness with others.
|
||||||
|
image: node:18
|
||||||
|
tasks:
|
||||||
|
- before: curl -sL https://unpkg.com/@pnpm/self-installer | node
|
||||||
|
- init: pnpm install && pnpm db:push && pnpm db:seed
|
||||||
|
command: pnpm dev
|
||||||
|
|
||||||
|
ports:
|
||||||
|
- port: 3001
|
||||||
|
visibility: public
|
||||||
1
.husky/_/.gitignore
vendored
1
.husky/_/.gitignore
vendored
@@ -1 +0,0 @@
|
|||||||
*
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
. "$(dirname "$0")/_/husky.sh"
|
|
||||||
|
|
||||||
pnpm lint-staged
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
{
|
|
||||||
"**/*.{js,jsx,ts,tsx,cjs,svelte,json,css,scss,md,yaml}": [
|
|
||||||
"prettier --ignore-path .gitignore --write --plugin-search-dir=."
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -2,6 +2,8 @@
|
|||||||
|
|
||||||
First of all, thank you for considering contributing to my project! It means a lot 💜.
|
First of all, thank you for considering contributing to my project! It means a lot 💜.
|
||||||
|
|
||||||
|
Contribution guide is for v2, not applicable for v3
|
||||||
|
|
||||||
## 🙋 Want to help?
|
## 🙋 Want to help?
|
||||||
|
|
||||||
If you begin in GitHub contribution, you can find the [first contribution](https://github.com/firstcontributions/first-contributions) and follow this guide.
|
If you begin in GitHub contribution, you can find the [first contribution](https://github.com/firstcontributions/first-contributions) and follow this guide.
|
||||||
@@ -15,7 +17,13 @@ This is a little list of what you can do to help the project:
|
|||||||
|
|
||||||
## 👋 Introduction
|
## 👋 Introduction
|
||||||
|
|
||||||
🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS.
|
### Setup with github codespaces
|
||||||
|
|
||||||
|
If you have github codespaces enabled then you can just create a codespace and run `pnpm dev` to run your the dev environment. All the required dependencies and packages has been configured for you already.
|
||||||
|
|
||||||
|
### Setup locally in your machine
|
||||||
|
|
||||||
|
> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. 💡 Although windows users can use github codespaces for development
|
||||||
|
|
||||||
#### Recommended Pull Request Guideline
|
#### Recommended Pull Request Guideline
|
||||||
|
|
||||||
@@ -35,20 +43,16 @@ Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I re
|
|||||||
|
|
||||||
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
||||||
|
|
||||||
#### Setup a local development environment
|
#### Steps for local setup
|
||||||
|
|
||||||
- Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
|
1. Copy `.env.template` to `.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
|
||||||
- Install dependencies with `pnpm install`.
|
2. Install dependencies with `pnpm install`.
|
||||||
- Need to create a local SQlite database with `pnpm db:push`.
|
3. Need to create a local SQlite database with `pnpm db:push`.
|
||||||
- This will apply all migrations at `db/dev.db`.
|
|
||||||
- Seed the database with base entities with `pnpm db:seed`
|
|
||||||
- You can start coding after starting `pnpm dev`.
|
|
||||||
|
|
||||||
#### How to start after you set up your local fork?
|
This will apply all migrations at `db/dev.db`.
|
||||||
|
|
||||||
This repository works better with [pnpm](https://pnpm.io) due to the lock file. I recommend you to give it a try and use `pnpm` as well because it is cool and efficient!
|
4. Seed the database with base entities with `pnpm db:seed`
|
||||||
|
5. You can start coding after starting `pnpm dev`.
|
||||||
You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
|
||||||
|
|
||||||
## 🧑💻 Developer contribution
|
## 🧑💻 Developer contribution
|
||||||
|
|
||||||
|
|||||||
35
Dockerfile
35
Dockerfile
@@ -1,17 +1,18 @@
|
|||||||
FROM node:16.14.2-alpine as install
|
FROM node:18-alpine as build
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
RUN apk add --no-cache curl
|
RUN apk add --no-cache curl
|
||||||
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
|
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
|
||||||
RUN pnpm add -g pnpm
|
|
||||||
|
|
||||||
COPY package*.json .
|
COPY . .
|
||||||
RUN pnpm install
|
RUN pnpm install
|
||||||
|
RUN pnpm build
|
||||||
|
|
||||||
FROM node:16.14.2-alpine
|
# Production build
|
||||||
ARG TARGETPLATFORM
|
FROM node:18-alpine
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
ENV NODE_ENV production
|
||||||
|
ARG TARGETPLATFORM
|
||||||
|
|
||||||
ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
|
ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
|
||||||
PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
|
PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
|
||||||
@@ -19,24 +20,24 @@ ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
|
|||||||
PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
|
PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
|
||||||
PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
|
PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
|
||||||
PRISMA_CLIENT_ENGINE_TYPE=binary
|
PRISMA_CLIENT_ENGINE_TYPE=binary
|
||||||
|
|
||||||
COPY --from=coollabsio/prisma-engine:latest /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/
|
|
||||||
|
|
||||||
COPY --from=install /app/node_modules ./node_modules
|
COPY --from=coollabsio/prisma-engine:3.15 /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/
|
||||||
COPY . .
|
|
||||||
|
|
||||||
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
|
RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl
|
||||||
RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@6
|
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
|
||||||
RUN pnpm add -g pnpm
|
|
||||||
RUN mkdir -p ~/.docker/cli-plugins/
|
RUN mkdir -p ~/.docker/cli-plugins/
|
||||||
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
|
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-20.10.9 -o /usr/bin/docker
|
||||||
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
|
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-compose-linux-2.3.4 -o ~/.docker/cli-plugins/docker-compose
|
||||||
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
|
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
|
||||||
|
|
||||||
RUN pnpm prisma generate
|
COPY --from=build /app/apps/api/build/ .
|
||||||
RUN pnpm build
|
COPY --from=build /app/apps/ui/build/ ./public
|
||||||
|
COPY --from=build /app/apps/api/prisma/ ./prisma
|
||||||
|
COPY --from=build /app/apps/api/package.json .
|
||||||
|
COPY --from=build /app/docker-compose.yaml .
|
||||||
|
|
||||||
|
RUN pnpm install -p
|
||||||
|
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
CMD ["pnpm", "start"]
|
CMD pnpm start
|
||||||
18
README.md
18
README.md
@@ -6,7 +6,11 @@ An open-source & self-hostable Heroku / Netlify alternative.
|
|||||||
|
|
||||||
https://demo.coolify.io/
|
https://demo.coolify.io/
|
||||||
|
|
||||||
(If it is unresponsive, that means someone overloaded the server. 🙃)
|
(If it is unresponsive, that means someone overloaded the server. 😄)
|
||||||
|
|
||||||
|
## Feedback
|
||||||
|
|
||||||
|
If you have a new service / build pack you would like to add, raise an idea [here](https://feedback.coolify.io/) to get feedback from the community!
|
||||||
|
|
||||||
## How to install
|
## How to install
|
||||||
|
|
||||||
@@ -26,6 +30,8 @@ For more details goto the [docs](https://docs.coollabs.io/coolify/installation).
|
|||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
|
ARM support is in beta!
|
||||||
|
|
||||||
### Git Sources
|
### Git Sources
|
||||||
|
|
||||||
You can use the following Git Sources to be auto-deployed to your Coolifyt instance! (Self-hosted versions are also supported.)
|
You can use the following Git Sources to be auto-deployed to your Coolifyt instance! (Self-hosted versions are also supported.)
|
||||||
@@ -52,19 +58,21 @@ These are the predefined build packs, but with the Docker build pack, you can ho
|
|||||||
- NuxtJS
|
- NuxtJS
|
||||||
- NextJS
|
- NextJS
|
||||||
- React/Preact
|
- React/Preact
|
||||||
- NextJS
|
|
||||||
- Gatsby
|
- Gatsby
|
||||||
- Svelte
|
- Svelte
|
||||||
- PHP
|
- PHP
|
||||||
- Laravel
|
- Laravel
|
||||||
- Rust
|
- Rust
|
||||||
- Docker
|
- Docker
|
||||||
|
- Python
|
||||||
|
- Deno
|
||||||
|
|
||||||
### Databases
|
### Databases
|
||||||
|
|
||||||
One-click database is ready to be used internally or shared over the internet:
|
One-click database is ready to be used internally or shared over the internet:
|
||||||
|
|
||||||
- MongoDB
|
- MongoDB
|
||||||
|
- MariaDB
|
||||||
- MySQL
|
- MySQL
|
||||||
- PostgreSQL
|
- PostgreSQL
|
||||||
- CouchDB
|
- CouchDB
|
||||||
@@ -74,9 +82,9 @@ One-click database is ready to be used internally or shared over the internet:
|
|||||||
|
|
||||||
You can host cool open-source services as well:
|
You can host cool open-source services as well:
|
||||||
|
|
||||||
- [WordPress](https://wordpress.org)
|
- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
|
||||||
- [Ghost](https://ghost.org)
|
- [Ghost](https://ghost.org)
|
||||||
- [Plausible Analytics](https://plausible.io)
|
- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
|
||||||
- [NocoDB](https://nocodb.com)
|
- [NocoDB](https://nocodb.com)
|
||||||
- [VSCode Server](https://github.com/cdr/code-server)
|
- [VSCode Server](https://github.com/cdr/code-server)
|
||||||
- [MinIO](https://min.io)
|
- [MinIO](https://min.io)
|
||||||
@@ -91,7 +99,7 @@ You can host cool open-source services as well:
|
|||||||
|
|
||||||
## Migration from v1
|
## Migration from v1
|
||||||
|
|
||||||
A fresh installation is necessary. v2 is not compatible with v1.
|
A fresh installation is necessary. v2 and v3 are not compatible with v1.
|
||||||
|
|
||||||
## Support
|
## Support
|
||||||
|
|
||||||
|
|||||||
9
apps/api/.eslintignore
Normal file
9
apps/api/.eslintignore
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
seed.js
|
||||||
|
.DS_Store
|
||||||
|
node_modules
|
||||||
|
build
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
!.env.example
|
||||||
|
dist
|
||||||
|
dev.db
|
||||||
11
apps/api/.eslintrc
Normal file
11
apps/api/.eslintrc
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"root": true,
|
||||||
|
"parser": "@typescript-eslint/parser",
|
||||||
|
"plugins": ["@typescript-eslint", "prettier"],
|
||||||
|
"extends": [
|
||||||
|
"eslint:recommended",
|
||||||
|
"plugin:@typescript-eslint/eslint-recommended",
|
||||||
|
"plugin:@typescript-eslint/recommended",
|
||||||
|
"prettier"
|
||||||
|
]
|
||||||
|
}
|
||||||
11
apps/api/.gitignore
vendored
Normal file
11
apps/api/.gitignore
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
.DS_Store
|
||||||
|
node_modules
|
||||||
|
build
|
||||||
|
.svelte-kit
|
||||||
|
package
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
!.env.example
|
||||||
|
dist
|
||||||
|
dev.db
|
||||||
|
client
|
||||||
0
apps/api/db/.gitkeep
Normal file
0
apps/api/db/.gitkeep
Normal file
7
apps/api/nodemon.json
Normal file
7
apps/api/nodemon.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"watch": ["src"],
|
||||||
|
"ignore": ["src/**/*.test.ts"],
|
||||||
|
"ext": "ts,mjs,json,graphql",
|
||||||
|
"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\) | grep -v client/` --platform=node --outdir=build --format=cjs && node build",
|
||||||
|
"legacyWatch": true
|
||||||
|
}
|
||||||
68
apps/api/package.json
Normal file
68
apps/api/package.json
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
{
|
||||||
|
"name": "coolify-api",
|
||||||
|
"description": "Coolify's Fastify API",
|
||||||
|
"license": "AGPL-3.0",
|
||||||
|
"scripts": {
|
||||||
|
"db:push": "prisma db push && prisma generate",
|
||||||
|
"db:seed": "prisma db seed",
|
||||||
|
"db:studio": "prisma studio",
|
||||||
|
"db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
|
||||||
|
"dev": "nodemon",
|
||||||
|
"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
|
||||||
|
"format": "prettier --write 'src/**/*.{js,ts,json,md}'",
|
||||||
|
"lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
|
||||||
|
"start": "NODE_ENV=production npx -y prisma migrate deploy && npx prisma generate && npx prisma db seed && node index.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@breejs/ts-worker": "2.0.0",
|
||||||
|
"@fastify/autoload": "5.0.0",
|
||||||
|
"@fastify/cookie": "7.0.0",
|
||||||
|
"@fastify/cors": "8.0.0",
|
||||||
|
"@fastify/env": "4.0.0",
|
||||||
|
"@fastify/jwt": "6.2.0",
|
||||||
|
"@fastify/static": "6.4.0",
|
||||||
|
"@iarna/toml": "2.2.5",
|
||||||
|
"@prisma/client": "3.15.2",
|
||||||
|
"axios": "0.27.2",
|
||||||
|
"bcryptjs": "2.4.3",
|
||||||
|
"bree": "9.1.1",
|
||||||
|
"cabin": "9.1.2",
|
||||||
|
"compare-versions": "4.1.3",
|
||||||
|
"cuid": "2.1.8",
|
||||||
|
"dayjs": "1.11.3",
|
||||||
|
"dockerode": "3.3.2",
|
||||||
|
"dotenv-extended": "2.9.0",
|
||||||
|
"fastify": "4.2.0",
|
||||||
|
"fastify-plugin": "3.0.1",
|
||||||
|
"generate-password": "1.7.0",
|
||||||
|
"get-port": "6.1.2",
|
||||||
|
"got": "12.1.0",
|
||||||
|
"is-ip": "4.0.0",
|
||||||
|
"js-yaml": "4.1.0",
|
||||||
|
"jsonwebtoken": "8.5.1",
|
||||||
|
"node-forge": "1.3.1",
|
||||||
|
"node-os-utils": "1.3.7",
|
||||||
|
"p-queue": "7.2.0",
|
||||||
|
"strip-ansi": "7.0.1",
|
||||||
|
"unique-names-generator": "4.7.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "18.0.3",
|
||||||
|
"@types/node-os-utils": "1.3.0",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.30.5",
|
||||||
|
"@typescript-eslint/parser": "5.30.5",
|
||||||
|
"esbuild": "0.14.48",
|
||||||
|
"eslint": "8.19.0",
|
||||||
|
"eslint-config-prettier": "8.5.0",
|
||||||
|
"eslint-plugin-prettier": "4.2.1",
|
||||||
|
"nodemon": "2.0.19",
|
||||||
|
"prettier": "2.7.1",
|
||||||
|
"prisma": "3.15.2",
|
||||||
|
"rimraf": "3.0.2",
|
||||||
|
"tsconfig-paths": "4.0.0",
|
||||||
|
"typescript": "4.7.4"
|
||||||
|
},
|
||||||
|
"prisma": {
|
||||||
|
"seed": "node prisma/seed.js"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "Application" ADD COLUMN "exposePort" INTEGER;
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "Service" ADD COLUMN "exposePort" INTEGER;
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
-- RedefineTables
|
||||||
|
PRAGMA foreign_keys=OFF;
|
||||||
|
CREATE TABLE "new_PlausibleAnalytics" (
|
||||||
|
"id" TEXT NOT NULL PRIMARY KEY,
|
||||||
|
"email" TEXT,
|
||||||
|
"username" TEXT,
|
||||||
|
"password" TEXT NOT NULL,
|
||||||
|
"postgresqlUser" TEXT NOT NULL,
|
||||||
|
"postgresqlPassword" TEXT NOT NULL,
|
||||||
|
"postgresqlDatabase" TEXT NOT NULL,
|
||||||
|
"postgresqlPublicPort" INTEGER,
|
||||||
|
"secretKeyBase" TEXT,
|
||||||
|
"scriptName" TEXT NOT NULL DEFAULT 'plausible.js',
|
||||||
|
"serviceId" TEXT NOT NULL,
|
||||||
|
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" DATETIME NOT NULL,
|
||||||
|
CONSTRAINT "PlausibleAnalytics_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||||
|
);
|
||||||
|
INSERT INTO "new_PlausibleAnalytics" ("createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username") SELECT "createdAt", "email", "id", "password", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "serviceId", "updatedAt", "username" FROM "PlausibleAnalytics";
|
||||||
|
DROP TABLE "PlausibleAnalytics";
|
||||||
|
ALTER TABLE "new_PlausibleAnalytics" RENAME TO "PlausibleAnalytics";
|
||||||
|
CREATE UNIQUE INDEX "PlausibleAnalytics_serviceId_key" ON "PlausibleAnalytics"("serviceId");
|
||||||
|
PRAGMA foreign_key_check;
|
||||||
|
PRAGMA foreign_keys=ON;
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
-- RedefineTables
|
||||||
|
PRAGMA foreign_keys=OFF;
|
||||||
|
CREATE TABLE "new_Wordpress" (
|
||||||
|
"id" TEXT NOT NULL PRIMARY KEY,
|
||||||
|
"extraConfig" TEXT,
|
||||||
|
"tablePrefix" TEXT,
|
||||||
|
"ownMysql" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"mysqlHost" TEXT,
|
||||||
|
"mysqlPort" INTEGER,
|
||||||
|
"mysqlUser" TEXT NOT NULL,
|
||||||
|
"mysqlPassword" TEXT NOT NULL,
|
||||||
|
"mysqlRootUser" TEXT NOT NULL,
|
||||||
|
"mysqlRootUserPassword" TEXT NOT NULL,
|
||||||
|
"mysqlDatabase" TEXT,
|
||||||
|
"mysqlPublicPort" INTEGER,
|
||||||
|
"ftpEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"ftpUser" TEXT,
|
||||||
|
"ftpPassword" TEXT,
|
||||||
|
"ftpPublicPort" INTEGER,
|
||||||
|
"ftpHostKey" TEXT,
|
||||||
|
"ftpHostKeyPrivate" TEXT,
|
||||||
|
"serviceId" TEXT NOT NULL,
|
||||||
|
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" DATETIME NOT NULL,
|
||||||
|
CONSTRAINT "Wordpress_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||||
|
);
|
||||||
|
INSERT INTO "new_Wordpress" ("createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt") SELECT "createdAt", "extraConfig", "ftpEnabled", "ftpHostKey", "ftpHostKeyPrivate", "ftpPassword", "ftpPublicPort", "ftpUser", "id", "mysqlDatabase", "mysqlPassword", "mysqlPublicPort", "mysqlRootUser", "mysqlRootUserPassword", "mysqlUser", "serviceId", "tablePrefix", "updatedAt" FROM "Wordpress";
|
||||||
|
DROP TABLE "Wordpress";
|
||||||
|
ALTER TABLE "new_Wordpress" RENAME TO "Wordpress";
|
||||||
|
CREATE UNIQUE INDEX "Wordpress_serviceId_key" ON "Wordpress"("serviceId");
|
||||||
|
PRAGMA foreign_key_check;
|
||||||
|
PRAGMA foreign_keys=ON;
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
-- RedefineTables
|
||||||
|
PRAGMA foreign_keys=OFF;
|
||||||
|
CREATE TABLE "new_Setting" (
|
||||||
|
"id" TEXT NOT NULL PRIMARY KEY,
|
||||||
|
"fqdn" TEXT,
|
||||||
|
"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"minPort" INTEGER NOT NULL DEFAULT 9000,
|
||||||
|
"maxPort" INTEGER NOT NULL DEFAULT 9100,
|
||||||
|
"proxyPassword" TEXT NOT NULL,
|
||||||
|
"proxyUser" TEXT NOT NULL,
|
||||||
|
"proxyHash" TEXT,
|
||||||
|
"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" DATETIME NOT NULL
|
||||||
|
);
|
||||||
|
INSERT INTO "new_Setting" ("createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "createdAt", "dualCerts", "fqdn", "id", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "maxPort", "minPort", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
|
||||||
|
DROP TABLE "Setting";
|
||||||
|
ALTER TABLE "new_Setting" RENAME TO "Setting";
|
||||||
|
CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
|
||||||
|
PRAGMA foreign_key_check;
|
||||||
|
PRAGMA foreign_keys=ON;
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "Minio" ADD COLUMN "apiFqdn" TEXT;
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "Application" ADD COLUMN "deploymentType" TEXT;
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
-- RedefineTables
|
||||||
|
PRAGMA foreign_keys=OFF;
|
||||||
|
CREATE TABLE "new_GitSource" (
|
||||||
|
"id" TEXT NOT NULL PRIMARY KEY,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"type" TEXT,
|
||||||
|
"apiUrl" TEXT,
|
||||||
|
"htmlUrl" TEXT,
|
||||||
|
"customPort" INTEGER NOT NULL DEFAULT 22,
|
||||||
|
"organization" TEXT,
|
||||||
|
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" DATETIME NOT NULL,
|
||||||
|
"githubAppId" TEXT,
|
||||||
|
"gitlabAppId" TEXT,
|
||||||
|
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||||
|
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||||
|
);
|
||||||
|
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
|
||||||
|
DROP TABLE "GitSource";
|
||||||
|
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
|
||||||
|
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||||
|
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||||
|
PRAGMA foreign_key_check;
|
||||||
|
PRAGMA foreign_keys=ON;
|
||||||
@@ -20,6 +20,7 @@ model Setting {
|
|||||||
proxyHash String?
|
proxyHash String?
|
||||||
isAutoUpdateEnabled Boolean @default(false)
|
isAutoUpdateEnabled Boolean @default(false)
|
||||||
isDNSCheckEnabled Boolean @default(true)
|
isDNSCheckEnabled Boolean @default(true)
|
||||||
|
isTraefikUsed Boolean @default(true)
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
}
|
}
|
||||||
@@ -84,11 +85,13 @@ model Application {
|
|||||||
buildPack String?
|
buildPack String?
|
||||||
projectId Int?
|
projectId Int?
|
||||||
port Int?
|
port Int?
|
||||||
|
exposePort Int?
|
||||||
installCommand String?
|
installCommand String?
|
||||||
buildCommand String?
|
buildCommand String?
|
||||||
startCommand String?
|
startCommand String?
|
||||||
baseDirectory String?
|
baseDirectory String?
|
||||||
publishDirectory String?
|
publishDirectory String?
|
||||||
|
deploymentType String?
|
||||||
phpModules String?
|
phpModules String?
|
||||||
pythonWSGI String?
|
pythonWSGI String?
|
||||||
pythonModule String?
|
pythonModule String?
|
||||||
@@ -215,6 +218,7 @@ model GitSource {
|
|||||||
type String?
|
type String?
|
||||||
apiUrl String?
|
apiUrl String?
|
||||||
htmlUrl String?
|
htmlUrl String?
|
||||||
|
customPort Int @default(22)
|
||||||
organization String?
|
organization String?
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
@@ -289,6 +293,7 @@ model Service {
|
|||||||
id String @id @default(cuid())
|
id String @id @default(cuid())
|
||||||
name String
|
name String
|
||||||
fqdn String?
|
fqdn String?
|
||||||
|
exposePort Int?
|
||||||
dualCerts Boolean @default(false)
|
dualCerts Boolean @default(false)
|
||||||
type String?
|
type String?
|
||||||
version String?
|
version String?
|
||||||
@@ -320,6 +325,7 @@ model PlausibleAnalytics {
|
|||||||
postgresqlDatabase String
|
postgresqlDatabase String
|
||||||
postgresqlPublicPort Int?
|
postgresqlPublicPort Int?
|
||||||
secretKeyBase String?
|
secretKeyBase String?
|
||||||
|
scriptName String @default("plausible.js")
|
||||||
serviceId String @unique
|
serviceId String @unique
|
||||||
service Service @relation(fields: [serviceId], references: [id])
|
service Service @relation(fields: [serviceId], references: [id])
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
@@ -331,6 +337,7 @@ model Minio {
|
|||||||
rootUser String
|
rootUser String
|
||||||
rootUserPassword String
|
rootUserPassword String
|
||||||
publicPort Int?
|
publicPort Int?
|
||||||
|
apiFqdn String?
|
||||||
serviceId String @unique
|
serviceId String @unique
|
||||||
service Service @relation(fields: [serviceId], references: [id])
|
service Service @relation(fields: [serviceId], references: [id])
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
@@ -350,6 +357,9 @@ model Wordpress {
|
|||||||
id String @id @default(cuid())
|
id String @id @default(cuid())
|
||||||
extraConfig String?
|
extraConfig String?
|
||||||
tablePrefix String?
|
tablePrefix String?
|
||||||
|
ownMysql Boolean @default(false)
|
||||||
|
mysqlHost String?
|
||||||
|
mysqlPort Int?
|
||||||
mysqlUser String
|
mysqlUser String
|
||||||
mysqlPassword String
|
mysqlPassword String
|
||||||
mysqlRootUser String
|
mysqlRootUser String
|
||||||
@@ -1,10 +1,10 @@
|
|||||||
const dotEnvExtended = require('dotenv-extended');
|
const dotEnvExtended = require('dotenv-extended');
|
||||||
dotEnvExtended.load();
|
dotEnvExtended.load();
|
||||||
const { PrismaClient } = require('@prisma/client');
|
|
||||||
const prisma = new PrismaClient();
|
|
||||||
const crypto = require('crypto');
|
const crypto = require('crypto');
|
||||||
const generator = require('generate-password');
|
const generator = require('generate-password');
|
||||||
const cuid = require('cuid');
|
const cuid = require('cuid');
|
||||||
|
const { PrismaClient } = require('@prisma/client');
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
|
||||||
function generatePassword(length = 24) {
|
function generatePassword(length = 24) {
|
||||||
return generator.generate({
|
return generator.generate({
|
||||||
@@ -33,6 +33,7 @@ async function main() {
|
|||||||
id: settingsFound.id
|
id: settingsFound.id
|
||||||
},
|
},
|
||||||
data: {
|
data: {
|
||||||
|
isTraefikUsed: true,
|
||||||
proxyHash: null
|
proxyHash: null
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -84,20 +85,4 @@ const encrypt = (text) => {
|
|||||||
content: encrypted.toString('hex')
|
content: encrypted.toString('hex')
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const decrypt = (hashString) => {
|
|
||||||
if (hashString) {
|
|
||||||
const hash = JSON.parse(hashString);
|
|
||||||
const decipher = crypto.createDecipheriv(
|
|
||||||
algorithm,
|
|
||||||
process.env['COOLIFY_SECRET_KEY'],
|
|
||||||
Buffer.from(hash.iv, 'hex')
|
|
||||||
);
|
|
||||||
const decrpyted = Buffer.concat([
|
|
||||||
decipher.update(Buffer.from(hash.content, 'hex')),
|
|
||||||
decipher.final()
|
|
||||||
]);
|
|
||||||
return decrpyted.toString();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
141
apps/api/src/index.ts
Normal file
141
apps/api/src/index.ts
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
import Fastify from 'fastify';
|
||||||
|
import cors from '@fastify/cors';
|
||||||
|
import serve from '@fastify/static';
|
||||||
|
import env from '@fastify/env';
|
||||||
|
import cookie from '@fastify/cookie';
|
||||||
|
import path, { join } from 'path';
|
||||||
|
import autoLoad from '@fastify/autoload';
|
||||||
|
import { asyncExecShell, isDev, prisma } from './lib/common';
|
||||||
|
import { scheduler } from './lib/scheduler';
|
||||||
|
|
||||||
|
declare module 'fastify' {
|
||||||
|
interface FastifyInstance {
|
||||||
|
config: {
|
||||||
|
COOLIFY_APP_ID: string,
|
||||||
|
COOLIFY_SECRET_KEY: string,
|
||||||
|
COOLIFY_DATABASE_URL: string,
|
||||||
|
COOLIFY_SENTRY_DSN: string,
|
||||||
|
COOLIFY_IS_ON: string,
|
||||||
|
COOLIFY_WHITE_LABELED: boolean,
|
||||||
|
COOLIFY_WHITE_LABELED_ICON: string | null,
|
||||||
|
COOLIFY_AUTO_UPDATE: boolean,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const port = isDev ? 3001 : 3000;
|
||||||
|
const host = '0.0.0.0';
|
||||||
|
const fastify = Fastify({
|
||||||
|
logger: false,
|
||||||
|
trustProxy: true
|
||||||
|
});
|
||||||
|
const schema = {
|
||||||
|
type: 'object',
|
||||||
|
required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
|
||||||
|
properties: {
|
||||||
|
COOLIFY_APP_ID: {
|
||||||
|
type: 'string',
|
||||||
|
},
|
||||||
|
COOLIFY_SECRET_KEY: {
|
||||||
|
type: 'string',
|
||||||
|
},
|
||||||
|
COOLIFY_DATABASE_URL: {
|
||||||
|
type: 'string',
|
||||||
|
default: 'file:../db/dev.db'
|
||||||
|
},
|
||||||
|
COOLIFY_SENTRY_DSN: {
|
||||||
|
type: 'string',
|
||||||
|
default: null
|
||||||
|
},
|
||||||
|
COOLIFY_IS_ON: {
|
||||||
|
type: 'string',
|
||||||
|
default: 'docker'
|
||||||
|
},
|
||||||
|
COOLIFY_WHITE_LABELED: {
|
||||||
|
type: 'boolean',
|
||||||
|
default: false
|
||||||
|
},
|
||||||
|
COOLIFY_WHITE_LABELED_ICON: {
|
||||||
|
type: 'string',
|
||||||
|
default: null
|
||||||
|
},
|
||||||
|
COOLIFY_AUTO_UPDATE: {
|
||||||
|
type: 'boolean',
|
||||||
|
default: false
|
||||||
|
},
|
||||||
|
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const options = {
|
||||||
|
schema,
|
||||||
|
dotenv: true
|
||||||
|
};
|
||||||
|
fastify.register(env, options);
|
||||||
|
if (!isDev) {
|
||||||
|
fastify.register(serve, {
|
||||||
|
root: path.join(__dirname, './public'),
|
||||||
|
preCompressed: true
|
||||||
|
});
|
||||||
|
fastify.setNotFoundHandler({}, function (request, reply) {
|
||||||
|
if (request.raw.url && request.raw.url.startsWith('/api')) {
|
||||||
|
return reply.status(404).send({
|
||||||
|
success: false
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return reply.status(200).sendFile('index.html');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
fastify.register(autoLoad, {
|
||||||
|
dir: join(__dirname, 'plugins')
|
||||||
|
});
|
||||||
|
fastify.register(autoLoad, {
|
||||||
|
dir: join(__dirname, 'routes')
|
||||||
|
});
|
||||||
|
|
||||||
|
fastify.register(cookie)
|
||||||
|
fastify.register(cors);
|
||||||
|
fastify.listen({ port, host }, async (err: any, address: any) => {
|
||||||
|
if (err) {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
console.log(`Coolify's API is listening on ${host}:${port}`);
|
||||||
|
await initServer()
|
||||||
|
await scheduler.start('deployApplication');
|
||||||
|
await scheduler.start('cleanupStorage');
|
||||||
|
await scheduler.start('checkProxies')
|
||||||
|
|
||||||
|
// Check if no build is running
|
||||||
|
|
||||||
|
// Check for update
|
||||||
|
setInterval(async () => {
|
||||||
|
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
|
||||||
|
if (isAutoUpdateEnabled) {
|
||||||
|
if (scheduler.workers.has('deployApplication')) {
|
||||||
|
scheduler.workers.get('deployApplication').postMessage("status:autoUpdater");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, 60000 * 15)
|
||||||
|
|
||||||
|
// Cleanup storage
|
||||||
|
setInterval(async () => {
|
||||||
|
if (scheduler.workers.has('deployApplication')) {
|
||||||
|
scheduler.workers.get('deployApplication').postMessage("status:cleanupStorage");
|
||||||
|
}
|
||||||
|
}, 60000 * 10)
|
||||||
|
|
||||||
|
scheduler.on('worker deleted', async (name) => {
|
||||||
|
if (name === 'autoUpdater' || name === 'cleanupStorage') {
|
||||||
|
if (!scheduler.workers.has('deployApplication')) await scheduler.start('deployApplication');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
async function initServer() {
|
||||||
|
try {
|
||||||
|
await asyncExecShell(`docker network create --attachable coolify`);
|
||||||
|
} catch (error) { }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
43
apps/api/src/jobs/autoUpdater.ts
Normal file
43
apps/api/src/jobs/autoUpdater.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import axios from 'axios';
|
||||||
|
import compareVersions from 'compare-versions';
|
||||||
|
import { parentPort } from 'node:worker_threads';
|
||||||
|
import { asyncExecShell, asyncSleep, isDev, prisma, version } from '../lib/common';
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
if (parentPort) {
|
||||||
|
try {
|
||||||
|
const currentVersion = version;
|
||||||
|
const { data: versions } = await axios
|
||||||
|
.get(
|
||||||
|
`https://get.coollabs.io/versions.json`
|
||||||
|
, {
|
||||||
|
params: {
|
||||||
|
appId: process.env['COOLIFY_APP_ID'] || undefined,
|
||||||
|
version: currentVersion
|
||||||
|
}
|
||||||
|
})
|
||||||
|
const latestVersion = versions['coolify'].main.version;
|
||||||
|
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
|
||||||
|
if (isUpdateAvailable === 1) {
|
||||||
|
const activeCount = 0
|
||||||
|
if (activeCount === 0) {
|
||||||
|
if (!isDev) {
|
||||||
|
console.log(`Updating Coolify to ${latestVersion}.`);
|
||||||
|
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
|
||||||
|
await asyncExecShell(`env | grep COOLIFY > .env`);
|
||||||
|
await asyncExecShell(
|
||||||
|
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
console.log('Updating (not really in dev mode).');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
} finally {
|
||||||
|
await prisma.$disconnect();
|
||||||
|
}
|
||||||
|
|
||||||
|
} else process.exit(0);
|
||||||
|
})();
|
||||||
88
apps/api/src/jobs/checkProxies.ts
Normal file
88
apps/api/src/jobs/checkProxies.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
import { parentPort } from 'node:worker_threads';
|
||||||
|
import { prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, asyncExecShell } from '../lib/common';
|
||||||
|
import { checkContainer, getEngine } from '../lib/docker';
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
if (parentPort) {
|
||||||
|
// Coolify Proxy
|
||||||
|
const engine = '/var/run/docker.sock';
|
||||||
|
const localDocker = await prisma.destinationDocker.findFirst({
|
||||||
|
where: { engine, network: 'coolify' }
|
||||||
|
});
|
||||||
|
if (localDocker && localDocker.isCoolifyProxyUsed) {
|
||||||
|
// Remove HAProxy
|
||||||
|
const found = await checkContainer(engine, 'coolify-haproxy');
|
||||||
|
const host = getEngine(engine);
|
||||||
|
if (found) {
|
||||||
|
await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker stop -t 0 coolify-haproxy && docker rm coolify-haproxy`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
await startTraefikProxy(engine);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// TCP Proxies
|
||||||
|
const databasesWithPublicPort = await prisma.database.findMany({
|
||||||
|
where: { publicPort: { not: null } },
|
||||||
|
include: { settings: true, destinationDocker: true }
|
||||||
|
});
|
||||||
|
for (const database of databasesWithPublicPort) {
|
||||||
|
const { destinationDockerId, destinationDocker, publicPort, id } = database;
|
||||||
|
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
|
||||||
|
const { privatePort } = generateDatabaseConfiguration(database);
|
||||||
|
// Remove HAProxy
|
||||||
|
const found = await checkContainer(engine, `haproxy-for-${publicPort}`);
|
||||||
|
const host = getEngine(engine);
|
||||||
|
if (found) {
|
||||||
|
await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker stop -t 0 haproxy-for-${publicPort} && docker rm haproxy-for-${publicPort}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const wordpressWithFtp = await prisma.wordpress.findMany({
|
||||||
|
where: { ftpPublicPort: { not: null } },
|
||||||
|
include: { service: { include: { destinationDocker: true } } }
|
||||||
|
});
|
||||||
|
for (const ftp of wordpressWithFtp) {
|
||||||
|
const { service, ftpPublicPort } = ftp;
|
||||||
|
const { destinationDockerId, destinationDocker, id } = service;
|
||||||
|
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
|
||||||
|
// Remove HAProxy
|
||||||
|
const found = await checkContainer(engine, `haproxy-for-${ftpPublicPort}`);
|
||||||
|
const host = getEngine(engine);
|
||||||
|
if (found) {
|
||||||
|
await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker stop -t 0 haproxy-for-${ftpPublicPort} && docker rm haproxy-for-${ftpPublicPort} `
|
||||||
|
);
|
||||||
|
}
|
||||||
|
await startTraefikTCPProxy(destinationDocker, id, ftpPublicPort, 22, 'wordpressftp');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// HTTP Proxies
|
||||||
|
const minioInstances = await prisma.minio.findMany({
|
||||||
|
where: { publicPort: { not: null } },
|
||||||
|
include: { service: { include: { destinationDocker: true } } }
|
||||||
|
});
|
||||||
|
for (const minio of minioInstances) {
|
||||||
|
const { service, publicPort } = minio;
|
||||||
|
const { destinationDockerId, destinationDocker, id } = service;
|
||||||
|
if (destinationDockerId && destinationDocker.isCoolifyProxyUsed) {
|
||||||
|
// Remove HAProxy
|
||||||
|
const found = await checkContainer(engine, `${id}-${publicPort}`);
|
||||||
|
const host = getEngine(engine);
|
||||||
|
if (found) {
|
||||||
|
await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker stop -t 0 ${id}-${publicPort} && docker rm ${id}-${publicPort}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await prisma.$disconnect();
|
||||||
|
} else process.exit(0);
|
||||||
|
})();
|
||||||
90
apps/api/src/jobs/cleanupStorage.ts
Normal file
90
apps/api/src/jobs/cleanupStorage.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import { parentPort } from 'node:worker_threads';
|
||||||
|
import { asyncExecShell, isDev, prisma, version } from '../lib/common';
|
||||||
|
import { getEngine } from '../lib/docker';
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
if (parentPort) {
|
||||||
|
const destinationDockers = await prisma.destinationDocker.findMany();
|
||||||
|
const engines = [...new Set(destinationDockers.map(({ engine }) => engine))];
|
||||||
|
for (const engine of engines) {
|
||||||
|
let lowDiskSpace = false;
|
||||||
|
const host = getEngine(engine);
|
||||||
|
// try {
|
||||||
|
// let stdout = null
|
||||||
|
// if (!isDev) {
|
||||||
|
// const output = await asyncExecShell(
|
||||||
|
// `DOCKER_HOST=${host} docker exec coolify sh -c 'df -kPT /'`
|
||||||
|
// );
|
||||||
|
// stdout = output.stdout;
|
||||||
|
// } else {
|
||||||
|
// const output = await asyncExecShell(
|
||||||
|
// `df -kPT /`
|
||||||
|
// );
|
||||||
|
// stdout = output.stdout;
|
||||||
|
// }
|
||||||
|
// let lines = stdout.trim().split('\n');
|
||||||
|
// let header = lines[0];
|
||||||
|
// let regex =
|
||||||
|
// /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
|
||||||
|
// const boundaries = [];
|
||||||
|
// let match;
|
||||||
|
|
||||||
|
// while ((match = regex.exec(header))) {
|
||||||
|
// boundaries.push(match[0].length);
|
||||||
|
// }
|
||||||
|
|
||||||
|
// boundaries[boundaries.length - 1] = -1;
|
||||||
|
// const data = lines.slice(1).map((line) => {
|
||||||
|
// const cl = boundaries.map((boundary) => {
|
||||||
|
// const column = boundary > 0 ? line.slice(0, boundary) : line;
|
||||||
|
// line = line.slice(boundary);
|
||||||
|
// return column.trim();
|
||||||
|
// });
|
||||||
|
// return {
|
||||||
|
// capacity: Number.parseInt(cl[5], 10) / 100
|
||||||
|
// };
|
||||||
|
// });
|
||||||
|
// if (data.length > 0) {
|
||||||
|
// const { capacity } = data[0];
|
||||||
|
// if (capacity > 0.6) {
|
||||||
|
// lowDiskSpace = true;
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// } catch (error) {
|
||||||
|
// console.log(error);
|
||||||
|
// }
|
||||||
|
if (!isDev) {
|
||||||
|
// Cleanup old coolify images
|
||||||
|
try {
|
||||||
|
let { stdout: images } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST=${host} docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs `
|
||||||
|
);
|
||||||
|
images = images.trim();
|
||||||
|
if (images) {
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker rmi -f ${images}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
//console.log(error);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker container prune -f`);
|
||||||
|
} catch (error) {
|
||||||
|
//console.log(error);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker image prune -f`);
|
||||||
|
} catch (error) {
|
||||||
|
//console.log(error);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker image prune -a -f`);
|
||||||
|
} catch (error) {
|
||||||
|
//console.log(error);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
console.log(`[DEV MODE] Low disk space: ${lowDiskSpace}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await prisma.$disconnect();
|
||||||
|
} else process.exit(0);
|
||||||
|
})();
|
||||||
359
apps/api/src/jobs/deployApplication.ts
Normal file
359
apps/api/src/jobs/deployApplication.ts
Normal file
@@ -0,0 +1,359 @@
|
|||||||
|
import { parentPort } from 'node:worker_threads';
|
||||||
|
import crypto from 'crypto';
|
||||||
|
import fs from 'fs/promises';
|
||||||
|
import yaml from 'js-yaml';
|
||||||
|
|
||||||
|
import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
|
||||||
|
import { asyncExecShell, createDirectories, decrypt, getDomain, prisma } from '../lib/common';
|
||||||
|
import { dockerInstance, getEngine } from '../lib/docker';
|
||||||
|
import * as importers from '../lib/importers';
|
||||||
|
import * as buildpacks from '../lib/buildPacks';
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
if (parentPort) {
|
||||||
|
const concurrency = 1
|
||||||
|
const PQueue = await import('p-queue');
|
||||||
|
const queue = new PQueue.default({ concurrency });
|
||||||
|
parentPort.on('message', async (message) => {
|
||||||
|
if (parentPort) {
|
||||||
|
if (message === 'error') throw new Error('oops');
|
||||||
|
if (message === 'cancel') {
|
||||||
|
parentPort.postMessage('cancelled');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (message === 'status:autoUpdater') {
|
||||||
|
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'autoUpdater' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (message === 'status:cleanupStorage') {
|
||||||
|
parentPort.postMessage({ size: queue.size, pending: queue.pending, caller: 'cleanupStorage' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
await queue.add(async () => {
|
||||||
|
const {
|
||||||
|
id: applicationId,
|
||||||
|
repository,
|
||||||
|
name,
|
||||||
|
destinationDocker,
|
||||||
|
destinationDockerId,
|
||||||
|
gitSource,
|
||||||
|
build_id: buildId,
|
||||||
|
configHash,
|
||||||
|
fqdn,
|
||||||
|
projectId,
|
||||||
|
secrets,
|
||||||
|
phpModules,
|
||||||
|
type,
|
||||||
|
pullmergeRequestId = null,
|
||||||
|
sourceBranch = null,
|
||||||
|
settings,
|
||||||
|
persistentStorage,
|
||||||
|
pythonWSGI,
|
||||||
|
pythonModule,
|
||||||
|
pythonVariable,
|
||||||
|
denoOptions,
|
||||||
|
exposePort,
|
||||||
|
baseImage,
|
||||||
|
baseBuildImage,
|
||||||
|
deploymentType,
|
||||||
|
} = message
|
||||||
|
let {
|
||||||
|
branch,
|
||||||
|
buildPack,
|
||||||
|
port,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
baseDirectory,
|
||||||
|
publishDirectory,
|
||||||
|
dockerFileLocation,
|
||||||
|
denoMainFile
|
||||||
|
} = message
|
||||||
|
try {
|
||||||
|
const { debug } = settings;
|
||||||
|
if (concurrency === 1) {
|
||||||
|
await prisma.build.updateMany({
|
||||||
|
where: {
|
||||||
|
status: { in: ['queued', 'running'] },
|
||||||
|
id: { not: buildId },
|
||||||
|
applicationId,
|
||||||
|
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
|
||||||
|
},
|
||||||
|
data: { status: 'failed' }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
let imageId = applicationId;
|
||||||
|
let domain = getDomain(fqdn);
|
||||||
|
const volumes =
|
||||||
|
persistentStorage?.map((storage) => {
|
||||||
|
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
|
||||||
|
}${storage.path}`;
|
||||||
|
}) || [];
|
||||||
|
// Previews, we need to get the source branch and set subdomain
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
branch = sourceBranch;
|
||||||
|
domain = `${pullmergeRequestId}.${domain}`;
|
||||||
|
imageId = `${applicationId}-${pullmergeRequestId}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
let deployNeeded = true;
|
||||||
|
let destinationType;
|
||||||
|
|
||||||
|
if (destinationDockerId) {
|
||||||
|
destinationType = 'docker';
|
||||||
|
}
|
||||||
|
if (destinationType === 'docker') {
|
||||||
|
const docker = dockerInstance({ destinationDocker });
|
||||||
|
const host = getEngine(destinationDocker.engine);
|
||||||
|
|
||||||
|
await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
|
||||||
|
const { workdir, repodir } = await createDirectories({ repository, buildId });
|
||||||
|
const configuration = await setDefaultConfiguration(message);
|
||||||
|
|
||||||
|
buildPack = configuration.buildPack;
|
||||||
|
port = configuration.port;
|
||||||
|
installCommand = configuration.installCommand;
|
||||||
|
startCommand = configuration.startCommand;
|
||||||
|
buildCommand = configuration.buildCommand;
|
||||||
|
publishDirectory = configuration.publishDirectory;
|
||||||
|
baseDirectory = configuration.baseDirectory;
|
||||||
|
dockerFileLocation = configuration.dockerFileLocation;
|
||||||
|
denoMainFile = configuration.denoMainFile;
|
||||||
|
const commit = await importers[gitSource.type]({
|
||||||
|
applicationId,
|
||||||
|
debug,
|
||||||
|
workdir,
|
||||||
|
repodir,
|
||||||
|
githubAppId: gitSource.githubApp?.id,
|
||||||
|
gitlabAppId: gitSource.gitlabApp?.id,
|
||||||
|
customPort: gitSource.customPort,
|
||||||
|
repository,
|
||||||
|
branch,
|
||||||
|
buildId,
|
||||||
|
apiUrl: gitSource.apiUrl,
|
||||||
|
htmlUrl: gitSource.htmlUrl,
|
||||||
|
projectId,
|
||||||
|
deployKeyId: gitSource.gitlabApp?.deployKeyId || null,
|
||||||
|
privateSshKey: decrypt(gitSource.gitlabApp?.privateSshKey) || null
|
||||||
|
});
|
||||||
|
if (!commit) {
|
||||||
|
throw new Error('No commit found?');
|
||||||
|
}
|
||||||
|
let tag = commit.slice(0, 7);
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await prisma.build.update({ where: { id: buildId }, data: { commit } });
|
||||||
|
} catch (err) {
|
||||||
|
console.log(err);
|
||||||
|
}
|
||||||
|
if (!pullmergeRequestId) {
|
||||||
|
const currentHash = crypto
|
||||||
|
//@ts-ignore
|
||||||
|
.createHash('sha256')
|
||||||
|
.update(
|
||||||
|
JSON.stringify({
|
||||||
|
buildPack,
|
||||||
|
port,
|
||||||
|
exposePort,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
secrets,
|
||||||
|
branch,
|
||||||
|
repository,
|
||||||
|
fqdn
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.digest('hex');
|
||||||
|
|
||||||
|
if (configHash !== currentHash) {
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id: applicationId },
|
||||||
|
data: { configHash: currentHash }
|
||||||
|
});
|
||||||
|
deployNeeded = true;
|
||||||
|
if (configHash) {
|
||||||
|
await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
deployNeeded = false;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
deployNeeded = true;
|
||||||
|
}
|
||||||
|
const image = await docker.engine.getImage(`${applicationId}:${tag}`);
|
||||||
|
let imageFound = false;
|
||||||
|
try {
|
||||||
|
await image.inspect();
|
||||||
|
imageFound = false;
|
||||||
|
} catch (error) {
|
||||||
|
//
|
||||||
|
}
|
||||||
|
if (!imageFound || deployNeeded) {
|
||||||
|
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
|
||||||
|
if (buildpacks[buildPack])
|
||||||
|
await buildpacks[buildPack]({
|
||||||
|
buildId,
|
||||||
|
applicationId,
|
||||||
|
domain,
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
pullmergeRequestId,
|
||||||
|
buildPack,
|
||||||
|
repository,
|
||||||
|
branch,
|
||||||
|
projectId,
|
||||||
|
publishDirectory,
|
||||||
|
debug,
|
||||||
|
commit,
|
||||||
|
tag,
|
||||||
|
workdir,
|
||||||
|
docker,
|
||||||
|
port: exposePort ? `${exposePort}:${port}` : port,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
baseDirectory,
|
||||||
|
secrets,
|
||||||
|
phpModules,
|
||||||
|
pythonWSGI,
|
||||||
|
pythonModule,
|
||||||
|
pythonVariable,
|
||||||
|
dockerFileLocation,
|
||||||
|
denoMainFile,
|
||||||
|
denoOptions,
|
||||||
|
baseImage,
|
||||||
|
baseBuildImage,
|
||||||
|
deploymentType
|
||||||
|
});
|
||||||
|
else {
|
||||||
|
await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
|
||||||
|
throw new Error(`Build pack ${buildPack} not found.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
await saveBuildLog({ line: 'Nothing changed.', buildId, applicationId });
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker stop -t 0 ${imageId}`);
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker rm ${imageId}`);
|
||||||
|
} catch (error) {
|
||||||
|
//
|
||||||
|
}
|
||||||
|
const envs = [];
|
||||||
|
if (secrets.length > 0) {
|
||||||
|
secrets.forEach((secret) => {
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
if (secret.isPRMRSecret) {
|
||||||
|
envs.push(`${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!secret.isPRMRSecret) {
|
||||||
|
envs.push(`${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
|
||||||
|
const labels = makeLabelForStandaloneApplication({
|
||||||
|
applicationId,
|
||||||
|
fqdn,
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
pullmergeRequestId,
|
||||||
|
buildPack,
|
||||||
|
repository,
|
||||||
|
branch,
|
||||||
|
projectId,
|
||||||
|
port: exposePort ? `${exposePort}:${port}` : port,
|
||||||
|
commit,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
baseDirectory,
|
||||||
|
publishDirectory
|
||||||
|
});
|
||||||
|
let envFound = false;
|
||||||
|
try {
|
||||||
|
envFound = !!(await fs.stat(`${workdir}/.env`));
|
||||||
|
} catch (error) {
|
||||||
|
//
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
|
||||||
|
const composeVolumes = volumes.map((volume) => {
|
||||||
|
return {
|
||||||
|
[`${volume.split(':')[0]}`]: {
|
||||||
|
name: volume.split(':')[0]
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
const composeFile = {
|
||||||
|
version: '3.8',
|
||||||
|
services: {
|
||||||
|
[imageId]: {
|
||||||
|
image: `${applicationId}:${tag}`,
|
||||||
|
container_name: imageId,
|
||||||
|
volumes,
|
||||||
|
env_file: envFound ? [`${workdir}/.env`] : [],
|
||||||
|
networks: [docker.network],
|
||||||
|
labels,
|
||||||
|
depends_on: [],
|
||||||
|
restart: 'always',
|
||||||
|
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
|
||||||
|
// logging: {
|
||||||
|
// driver: 'fluentd',
|
||||||
|
// },
|
||||||
|
deploy: {
|
||||||
|
restart_policy: {
|
||||||
|
condition: 'on-failure',
|
||||||
|
delay: '5s',
|
||||||
|
max_attempts: 3,
|
||||||
|
window: '120s'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
networks: {
|
||||||
|
[docker.network]: {
|
||||||
|
external: true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
volumes: Object.assign({}, ...composeVolumes)
|
||||||
|
};
|
||||||
|
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
|
||||||
|
await asyncExecShell(
|
||||||
|
`DOCKER_HOST=${host} docker compose --project-directory ${workdir} up -d`
|
||||||
|
);
|
||||||
|
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
|
||||||
|
} catch (error) {
|
||||||
|
await saveBuildLog({ line: error, buildId, applicationId });
|
||||||
|
await prisma.build.update({
|
||||||
|
where: { id: message.build_id },
|
||||||
|
data: { status: 'failed' }
|
||||||
|
});
|
||||||
|
throw new Error(error);
|
||||||
|
}
|
||||||
|
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
|
||||||
|
await prisma.build.update({ where: { id: message.build_id }, data: { status: 'success' } });
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
await prisma.build.update({
|
||||||
|
where: { id: message.build_id },
|
||||||
|
data: { status: 'failed' }
|
||||||
|
});
|
||||||
|
await saveBuildLog({ line: error, buildId, applicationId });
|
||||||
|
} finally {
|
||||||
|
await prisma.$disconnect();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
await prisma.$disconnect();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else process.exit(0);
|
||||||
|
})();
|
||||||
@@ -1,240 +1,23 @@
|
|||||||
import { base64Encode } from '$lib/crypto';
|
import { asyncExecShell, base64Encode, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
|
||||||
import { getDomain, saveBuildLog, version } from '$lib/common';
|
import { scheduler } from "../scheduler";
|
||||||
import * as db from '$lib/database';
|
|
||||||
import { scanningTemplates } from '$lib/components/templates';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
import { staticDeployments } from '$lib/components/common';
|
import { day } from "../dayjs";
|
||||||
|
|
||||||
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
|
const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
|
||||||
const nodeBased = [
|
const nodeBased = [
|
||||||
'react',
|
'react',
|
||||||
|
'preact',
|
||||||
'vuejs',
|
'vuejs',
|
||||||
'svelte',
|
'svelte',
|
||||||
'gatsby',
|
'gatsby',
|
||||||
'php',
|
|
||||||
'astro',
|
'astro',
|
||||||
'eleventy',
|
'eleventy',
|
||||||
'node',
|
'node',
|
||||||
'nestjs'
|
'nestjs',
|
||||||
|
'nuxtjs',
|
||||||
|
'nextjs'
|
||||||
];
|
];
|
||||||
|
|
||||||
export function makeLabelForStandaloneApplication({
|
export function setDefaultBaseImage(buildPack: string | null, deploymentType: string | null = null) {
|
||||||
applicationId,
|
|
||||||
fqdn,
|
|
||||||
name,
|
|
||||||
type,
|
|
||||||
pullmergeRequestId = null,
|
|
||||||
buildPack,
|
|
||||||
repository,
|
|
||||||
branch,
|
|
||||||
projectId,
|
|
||||||
port,
|
|
||||||
commit,
|
|
||||||
installCommand,
|
|
||||||
buildCommand,
|
|
||||||
startCommand,
|
|
||||||
baseDirectory,
|
|
||||||
publishDirectory
|
|
||||||
}) {
|
|
||||||
if (pullmergeRequestId) {
|
|
||||||
const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
|
|
||||||
const domain = getDomain(fqdn);
|
|
||||||
fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
|
|
||||||
}
|
|
||||||
return [
|
|
||||||
'coolify.managed=true',
|
|
||||||
`coolify.version=${version}`,
|
|
||||||
`coolify.type=standalone-application`,
|
|
||||||
`coolify.configuration=${base64Encode(
|
|
||||||
JSON.stringify({
|
|
||||||
applicationId,
|
|
||||||
fqdn,
|
|
||||||
name,
|
|
||||||
type,
|
|
||||||
pullmergeRequestId,
|
|
||||||
buildPack,
|
|
||||||
repository,
|
|
||||||
branch,
|
|
||||||
projectId,
|
|
||||||
port,
|
|
||||||
commit,
|
|
||||||
installCommand,
|
|
||||||
buildCommand,
|
|
||||||
startCommand,
|
|
||||||
baseDirectory,
|
|
||||||
publishDirectory
|
|
||||||
})
|
|
||||||
)}`
|
|
||||||
];
|
|
||||||
}
|
|
||||||
export async function makeLabelForStandaloneDatabase({ id, image, volume }) {
|
|
||||||
const database = await db.prisma.database.findFirst({ where: { id } });
|
|
||||||
delete database.destinationDockerId;
|
|
||||||
delete database.createdAt;
|
|
||||||
delete database.updatedAt;
|
|
||||||
return [
|
|
||||||
'coolify.managed=true',
|
|
||||||
`coolify.version=${version}`,
|
|
||||||
`coolify.type=standalone-database`,
|
|
||||||
`coolify.configuration=${base64Encode(
|
|
||||||
JSON.stringify({
|
|
||||||
version,
|
|
||||||
image,
|
|
||||||
volume,
|
|
||||||
...database
|
|
||||||
})
|
|
||||||
)}`
|
|
||||||
];
|
|
||||||
}
|
|
||||||
|
|
||||||
export function makeLabelForServices(type) {
|
|
||||||
return [
|
|
||||||
'coolify.managed=true',
|
|
||||||
`coolify.version=${version}`,
|
|
||||||
`coolify.type=service`,
|
|
||||||
`coolify.service.type=${type}`
|
|
||||||
];
|
|
||||||
}
|
|
||||||
|
|
||||||
export const setDefaultConfiguration = async (data) => {
|
|
||||||
let {
|
|
||||||
buildPack,
|
|
||||||
port,
|
|
||||||
installCommand,
|
|
||||||
startCommand,
|
|
||||||
buildCommand,
|
|
||||||
publishDirectory,
|
|
||||||
baseDirectory,
|
|
||||||
dockerFileLocation,
|
|
||||||
denoMainFile
|
|
||||||
} = data;
|
|
||||||
const template = scanningTemplates[buildPack];
|
|
||||||
if (!port) {
|
|
||||||
port = template?.port || 3000;
|
|
||||||
|
|
||||||
if (buildPack === 'static') port = 80;
|
|
||||||
else if (buildPack === 'node') port = 3000;
|
|
||||||
else if (buildPack === 'php') port = 80;
|
|
||||||
else if (buildPack === 'python') port = 8000;
|
|
||||||
}
|
|
||||||
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
|
||||||
installCommand = template?.installCommand || 'yarn install';
|
|
||||||
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
|
||||||
startCommand = template?.startCommand || 'yarn start';
|
|
||||||
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
|
||||||
buildCommand = template?.buildCommand || null;
|
|
||||||
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
|
|
||||||
if (baseDirectory) {
|
|
||||||
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
|
|
||||||
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
|
|
||||||
}
|
|
||||||
if (dockerFileLocation) {
|
|
||||||
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
|
|
||||||
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
|
|
||||||
} else {
|
|
||||||
dockerFileLocation = '/Dockerfile';
|
|
||||||
}
|
|
||||||
if (!denoMainFile) {
|
|
||||||
denoMainFile = 'main.ts';
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
buildPack,
|
|
||||||
port,
|
|
||||||
installCommand,
|
|
||||||
startCommand,
|
|
||||||
buildCommand,
|
|
||||||
publishDirectory,
|
|
||||||
baseDirectory,
|
|
||||||
dockerFileLocation,
|
|
||||||
denoMainFile
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function copyBaseConfigurationFiles(
|
|
||||||
buildPack,
|
|
||||||
workdir,
|
|
||||||
buildId,
|
|
||||||
applicationId,
|
|
||||||
baseImage
|
|
||||||
) {
|
|
||||||
try {
|
|
||||||
if (buildPack === 'php') {
|
|
||||||
await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
|
|
||||||
await saveBuildLog({
|
|
||||||
line: 'Copied default configuration file for PHP.',
|
|
||||||
buildId,
|
|
||||||
applicationId
|
|
||||||
});
|
|
||||||
} else if (staticDeployments.includes(buildPack) && baseImage.includes('nginx')) {
|
|
||||||
await fs.writeFile(
|
|
||||||
`${workdir}/nginx.conf`,
|
|
||||||
`user nginx;
|
|
||||||
worker_processes auto;
|
|
||||||
|
|
||||||
error_log /docker.stdout;
|
|
||||||
pid /run/nginx.pid;
|
|
||||||
|
|
||||||
events {
|
|
||||||
worker_connections 1024;
|
|
||||||
}
|
|
||||||
|
|
||||||
http {
|
|
||||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
|
||||||
'$status $body_bytes_sent "$http_referer" '
|
|
||||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
|
||||||
|
|
||||||
access_log /docker.stdout main;
|
|
||||||
|
|
||||||
sendfile on;
|
|
||||||
tcp_nopush on;
|
|
||||||
tcp_nodelay on;
|
|
||||||
keepalive_timeout 65;
|
|
||||||
types_hash_max_size 2048;
|
|
||||||
|
|
||||||
include /etc/nginx/mime.types;
|
|
||||||
default_type application/octet-stream;
|
|
||||||
|
|
||||||
server {
|
|
||||||
listen 80;
|
|
||||||
server_name localhost;
|
|
||||||
|
|
||||||
location / {
|
|
||||||
root /app;
|
|
||||||
index index.html;
|
|
||||||
try_files $uri $uri/index.html $uri/ /index.html =404;
|
|
||||||
}
|
|
||||||
|
|
||||||
error_page 404 /50x.html;
|
|
||||||
|
|
||||||
# redirect server error pages to the static page /50x.html
|
|
||||||
#
|
|
||||||
error_page 500 502 503 504 /50x.html;
|
|
||||||
location = /50x.html {
|
|
||||||
root /app;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
throw new Error(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
|
|
||||||
return (
|
|
||||||
installCommand?.includes('pnpm') ||
|
|
||||||
buildCommand?.includes('pnpm') ||
|
|
||||||
startCommand?.includes('pnpm')
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function setDefaultBaseImage(buildPack) {
|
|
||||||
const nodeVersions = [
|
const nodeVersions = [
|
||||||
{
|
{
|
||||||
value: 'node:lts',
|
value: 'node:lts',
|
||||||
@@ -403,18 +186,90 @@ export function setDefaultBaseImage(buildPack) {
|
|||||||
label: 'webdevops/php-nginx:7.1-alpine'
|
label: 'webdevops/php-nginx:7.1-alpine'
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
|
const pythonVersions = [
|
||||||
let payload = {
|
{
|
||||||
|
value: 'python:3.10-alpine',
|
||||||
|
label: 'python:3.10-alpine'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.10-buster',
|
||||||
|
label: 'python:3.10-buster'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.10-bullseye',
|
||||||
|
label: 'python:3.10-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.10-slim-bullseye',
|
||||||
|
label: 'python:3.10-slim-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.9-alpine',
|
||||||
|
label: 'python:3.9-alpine'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.9-buster',
|
||||||
|
label: 'python:3.9-buster'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.9-bullseye',
|
||||||
|
label: 'python:3.9-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.9-slim-bullseye',
|
||||||
|
label: 'python:3.9-slim-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.8-alpine',
|
||||||
|
label: 'python:3.8-alpine'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.8-buster',
|
||||||
|
label: 'python:3.8-buster'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.8-bullseye',
|
||||||
|
label: 'python:3.8-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.8-slim-bullseye',
|
||||||
|
label: 'python:3.8-slim-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.7-alpine',
|
||||||
|
label: 'python:3.7-alpine'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.7-buster',
|
||||||
|
label: 'python:3.7-buster'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.7-bullseye',
|
||||||
|
label: 'python:3.7-bullseye'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
value: 'python:3.7-slim-bullseye',
|
||||||
|
label: 'python:3.7-slim-bullseye'
|
||||||
|
}
|
||||||
|
];
|
||||||
|
let payload: any = {
|
||||||
baseImage: null,
|
baseImage: null,
|
||||||
baseBuildImage: null,
|
baseBuildImage: null,
|
||||||
baseImages: [],
|
baseImages: [],
|
||||||
baseBuildImages: []
|
baseBuildImages: []
|
||||||
};
|
};
|
||||||
if (nodeBased.includes(buildPack)) {
|
if (nodeBased.includes(buildPack)) {
|
||||||
payload.baseImage = 'node:lts';
|
if (deploymentType === 'static') {
|
||||||
payload.baseImages = nodeVersions;
|
payload.baseImage = 'webdevops/nginx:alpine';
|
||||||
payload.baseBuildImage = 'node:lts';
|
payload.baseImages = staticVersions;
|
||||||
payload.baseBuildImages = nodeVersions;
|
payload.baseBuildImage = 'node:lts';
|
||||||
|
payload.baseBuildImages = nodeVersions;
|
||||||
|
} else {
|
||||||
|
payload.baseImage = 'node:lts';
|
||||||
|
payload.baseImages = nodeVersions;
|
||||||
|
payload.baseBuildImage = 'node:lts';
|
||||||
|
payload.baseBuildImages = nodeVersions;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
if (staticApps.includes(buildPack)) {
|
if (staticApps.includes(buildPack)) {
|
||||||
payload.baseImage = 'webdevops/nginx:alpine';
|
payload.baseImage = 'webdevops/nginx:alpine';
|
||||||
@@ -423,7 +278,8 @@ export function setDefaultBaseImage(buildPack) {
|
|||||||
payload.baseBuildImages = nodeVersions;
|
payload.baseBuildImages = nodeVersions;
|
||||||
}
|
}
|
||||||
if (buildPack === 'python') {
|
if (buildPack === 'python') {
|
||||||
payload.baseImage = 'python:3-alpine';
|
payload.baseImage = 'python:3.10-alpine';
|
||||||
|
payload.baseImages = pythonVersions;
|
||||||
}
|
}
|
||||||
if (buildPack === 'rust') {
|
if (buildPack === 'rust') {
|
||||||
payload.baseImage = 'rust:latest';
|
payload.baseImage = 'rust:latest';
|
||||||
@@ -445,3 +301,421 @@ export function setDefaultBaseImage(buildPack) {
|
|||||||
}
|
}
|
||||||
return payload;
|
return payload;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const setDefaultConfiguration = async (data: any) => {
|
||||||
|
let {
|
||||||
|
buildPack,
|
||||||
|
port,
|
||||||
|
installCommand,
|
||||||
|
startCommand,
|
||||||
|
buildCommand,
|
||||||
|
publishDirectory,
|
||||||
|
baseDirectory,
|
||||||
|
dockerFileLocation,
|
||||||
|
denoMainFile
|
||||||
|
} = data;
|
||||||
|
//@ts-ignore
|
||||||
|
const template = scanningTemplates[buildPack];
|
||||||
|
if (!port) {
|
||||||
|
port = template?.port || 3000;
|
||||||
|
|
||||||
|
if (buildPack === 'static') port = 80;
|
||||||
|
else if (buildPack === 'node') port = 3000;
|
||||||
|
else if (buildPack === 'php') port = 80;
|
||||||
|
else if (buildPack === 'python') port = 8000;
|
||||||
|
}
|
||||||
|
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||||
|
installCommand = template?.installCommand || 'yarn install';
|
||||||
|
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||||
|
startCommand = template?.startCommand || 'yarn start';
|
||||||
|
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||||
|
buildCommand = template?.buildCommand || null;
|
||||||
|
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
|
||||||
|
if (baseDirectory) {
|
||||||
|
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
|
||||||
|
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
|
||||||
|
}
|
||||||
|
if (dockerFileLocation) {
|
||||||
|
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
|
||||||
|
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
|
||||||
|
} else {
|
||||||
|
dockerFileLocation = '/Dockerfile';
|
||||||
|
}
|
||||||
|
if (!denoMainFile) {
|
||||||
|
denoMainFile = 'main.ts';
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
buildPack,
|
||||||
|
port,
|
||||||
|
installCommand,
|
||||||
|
startCommand,
|
||||||
|
buildCommand,
|
||||||
|
publishDirectory,
|
||||||
|
baseDirectory,
|
||||||
|
dockerFileLocation,
|
||||||
|
denoMainFile
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const scanningTemplates = {
|
||||||
|
'@sveltejs/kit': {
|
||||||
|
buildPack: 'nodejs'
|
||||||
|
},
|
||||||
|
astro: {
|
||||||
|
buildPack: 'astro'
|
||||||
|
},
|
||||||
|
'@11ty/eleventy': {
|
||||||
|
buildPack: 'eleventy'
|
||||||
|
},
|
||||||
|
svelte: {
|
||||||
|
buildPack: 'svelte'
|
||||||
|
},
|
||||||
|
'@nestjs/core': {
|
||||||
|
buildPack: 'nestjs'
|
||||||
|
},
|
||||||
|
next: {
|
||||||
|
buildPack: 'nextjs'
|
||||||
|
},
|
||||||
|
nuxt: {
|
||||||
|
buildPack: 'nuxtjs'
|
||||||
|
},
|
||||||
|
'react-scripts': {
|
||||||
|
buildPack: 'react'
|
||||||
|
},
|
||||||
|
'parcel-bundler': {
|
||||||
|
buildPack: 'static'
|
||||||
|
},
|
||||||
|
'@vue/cli-service': {
|
||||||
|
buildPack: 'vuejs'
|
||||||
|
},
|
||||||
|
vuejs: {
|
||||||
|
buildPack: 'vuejs'
|
||||||
|
},
|
||||||
|
gatsby: {
|
||||||
|
buildPack: 'gatsby'
|
||||||
|
},
|
||||||
|
'preact-cli': {
|
||||||
|
buildPack: 'react'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
export const saveBuildLog = async ({
|
||||||
|
line,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
}: {
|
||||||
|
line: string;
|
||||||
|
buildId: string;
|
||||||
|
applicationId: string;
|
||||||
|
}): Promise<any> => {
|
||||||
|
if (line && typeof line === 'string' && line.includes('ghs_')) {
|
||||||
|
const regex = /ghs_.*@/g;
|
||||||
|
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
|
||||||
|
}
|
||||||
|
const addTimestamp = `[${generateTimestamp()}] ${line}`;
|
||||||
|
if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
|
||||||
|
return await prisma.buildLog.create({
|
||||||
|
data: {
|
||||||
|
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function copyBaseConfigurationFiles(
|
||||||
|
buildPack,
|
||||||
|
workdir,
|
||||||
|
buildId,
|
||||||
|
applicationId,
|
||||||
|
baseImage
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
if (buildPack === 'php') {
|
||||||
|
await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
|
||||||
|
await saveBuildLog({
|
||||||
|
line: 'Copied default configuration file for PHP.',
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
});
|
||||||
|
} else if (baseImage?.includes('nginx')) {
|
||||||
|
await fs.writeFile(
|
||||||
|
`${workdir}/nginx.conf`,
|
||||||
|
`user nginx;
|
||||||
|
worker_processes auto;
|
||||||
|
|
||||||
|
error_log /docker.stdout;
|
||||||
|
pid /run/nginx.pid;
|
||||||
|
|
||||||
|
events {
|
||||||
|
worker_connections 1024;
|
||||||
|
}
|
||||||
|
|
||||||
|
http {
|
||||||
|
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||||
|
'$status $body_bytes_sent "$http_referer" '
|
||||||
|
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||||
|
|
||||||
|
access_log /docker.stdout main;
|
||||||
|
|
||||||
|
sendfile on;
|
||||||
|
tcp_nopush on;
|
||||||
|
tcp_nodelay on;
|
||||||
|
keepalive_timeout 65;
|
||||||
|
types_hash_max_size 2048;
|
||||||
|
|
||||||
|
include /etc/nginx/mime.types;
|
||||||
|
default_type application/octet-stream;
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name localhost;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
root /app;
|
||||||
|
index index.html;
|
||||||
|
try_files $uri $uri/index.html $uri/ /index.html =404;
|
||||||
|
}
|
||||||
|
|
||||||
|
error_page 404 /50x.html;
|
||||||
|
|
||||||
|
# redirect server error pages to the static page /50x.html
|
||||||
|
#
|
||||||
|
error_page 500 502 503 504 /50x.html;
|
||||||
|
location = /50x.html {
|
||||||
|
root /app;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
throw new Error(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
|
||||||
|
return (
|
||||||
|
installCommand?.includes('pnpm') ||
|
||||||
|
buildCommand?.includes('pnpm') ||
|
||||||
|
startCommand?.includes('pnpm')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
export async function buildImage({
|
||||||
|
applicationId,
|
||||||
|
tag,
|
||||||
|
workdir,
|
||||||
|
docker,
|
||||||
|
buildId,
|
||||||
|
isCache = false,
|
||||||
|
debug = false,
|
||||||
|
dockerFileLocation = '/Dockerfile'
|
||||||
|
}) {
|
||||||
|
if (isCache) {
|
||||||
|
await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
|
||||||
|
} else {
|
||||||
|
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
|
||||||
|
}
|
||||||
|
if (!debug && isCache) {
|
||||||
|
await saveBuildLog({
|
||||||
|
line: `Debug turned off. To see more details, allow it in the configuration.`,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const stream = await docker.engine.buildImage(
|
||||||
|
{ src: ['.'], context: workdir },
|
||||||
|
{
|
||||||
|
dockerfile: isCache ? `${dockerFileLocation}-cache` : dockerFileLocation,
|
||||||
|
t: `${applicationId}:${tag}${isCache ? '-cache' : ''}`
|
||||||
|
}
|
||||||
|
);
|
||||||
|
await streamEvents({ stream, docker, buildId, applicationId, debug });
|
||||||
|
await saveBuildLog({ line: `Building image successful!`, buildId, applicationId });
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
docker.engine.modem.followProgress(stream, onFinished, onProgress);
|
||||||
|
function onFinished(err, res) {
|
||||||
|
if (err) reject(err);
|
||||||
|
resolve(res);
|
||||||
|
}
|
||||||
|
async function onProgress(event) {
|
||||||
|
if (event.error) {
|
||||||
|
reject(event.error);
|
||||||
|
} else if (event.stream) {
|
||||||
|
if (event.stream !== '\n') {
|
||||||
|
if (debug)
|
||||||
|
await saveBuildLog({
|
||||||
|
line: `${event.stream.replace('\n', '')}`,
|
||||||
|
buildId,
|
||||||
|
applicationId
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function makeLabelForStandaloneApplication({
|
||||||
|
applicationId,
|
||||||
|
fqdn,
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
pullmergeRequestId = null,
|
||||||
|
buildPack,
|
||||||
|
repository,
|
||||||
|
branch,
|
||||||
|
projectId,
|
||||||
|
port,
|
||||||
|
commit,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
baseDirectory,
|
||||||
|
publishDirectory
|
||||||
|
}) {
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
|
||||||
|
const domain = getDomain(fqdn);
|
||||||
|
fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
|
||||||
|
}
|
||||||
|
return [
|
||||||
|
'coolify.managed=true',
|
||||||
|
`coolify.version=${version}`,
|
||||||
|
`coolify.type=standalone-application`,
|
||||||
|
`coolify.configuration=${base64Encode(
|
||||||
|
JSON.stringify({
|
||||||
|
applicationId,
|
||||||
|
fqdn,
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
pullmergeRequestId,
|
||||||
|
buildPack,
|
||||||
|
repository,
|
||||||
|
branch,
|
||||||
|
projectId,
|
||||||
|
port,
|
||||||
|
commit,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
baseDirectory,
|
||||||
|
publishDirectory
|
||||||
|
})
|
||||||
|
)}`
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function buildCacheImageWithNode(data, imageForBuild) {
|
||||||
|
const {
|
||||||
|
applicationId,
|
||||||
|
tag,
|
||||||
|
workdir,
|
||||||
|
docker,
|
||||||
|
buildId,
|
||||||
|
baseDirectory,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
debug,
|
||||||
|
secrets,
|
||||||
|
pullmergeRequestId
|
||||||
|
} = data;
|
||||||
|
const isPnpm = checkPnpm(installCommand, buildCommand);
|
||||||
|
const Dockerfile: Array<string> = [];
|
||||||
|
Dockerfile.push(`FROM ${imageForBuild}`);
|
||||||
|
Dockerfile.push('WORKDIR /app');
|
||||||
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
|
if (secrets.length > 0) {
|
||||||
|
secrets.forEach((secret) => {
|
||||||
|
if (secret.isBuildSecret) {
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
if (secret.isPRMRSecret) {
|
||||||
|
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!secret.isPRMRSecret) {
|
||||||
|
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (isPnpm) {
|
||||||
|
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||||
|
}
|
||||||
|
if (installCommand) {
|
||||||
|
Dockerfile.push(`COPY .${baseDirectory || ''}/package.json ./`);
|
||||||
|
Dockerfile.push(`RUN ${installCommand}`);
|
||||||
|
}
|
||||||
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
|
Dockerfile.push(`RUN ${buildCommand}`);
|
||||||
|
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
|
||||||
|
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function buildCacheImageForLaravel(data, imageForBuild) {
|
||||||
|
const { applicationId, tag, workdir, docker, buildId, debug, secrets, pullmergeRequestId } = data;
|
||||||
|
const Dockerfile: Array<string> = [];
|
||||||
|
Dockerfile.push(`FROM ${imageForBuild}`);
|
||||||
|
Dockerfile.push('WORKDIR /app');
|
||||||
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
|
if (secrets.length > 0) {
|
||||||
|
secrets.forEach((secret) => {
|
||||||
|
if (secret.isBuildSecret) {
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
if (secret.isPRMRSecret) {
|
||||||
|
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!secret.isPRMRSecret) {
|
||||||
|
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Dockerfile.push(`COPY *.json *.mix.js /app/`);
|
||||||
|
Dockerfile.push(`COPY resources /app/resources`);
|
||||||
|
Dockerfile.push(`RUN yarn install && yarn production`);
|
||||||
|
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
|
||||||
|
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function buildCacheImageWithCargo(data, imageForBuild) {
|
||||||
|
const {
|
||||||
|
applicationId,
|
||||||
|
tag,
|
||||||
|
workdir,
|
||||||
|
docker,
|
||||||
|
buildId,
|
||||||
|
baseDirectory,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
debug,
|
||||||
|
secrets
|
||||||
|
} = data;
|
||||||
|
const Dockerfile: Array<string> = [];
|
||||||
|
Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
|
||||||
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
|
Dockerfile.push('WORKDIR /app');
|
||||||
|
Dockerfile.push('RUN cargo install cargo-chef');
|
||||||
|
Dockerfile.push('COPY . .');
|
||||||
|
Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json');
|
||||||
|
Dockerfile.push(`FROM ${imageForBuild}`);
|
||||||
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
|
Dockerfile.push('WORKDIR /app');
|
||||||
|
Dockerfile.push('RUN cargo install cargo-chef');
|
||||||
|
Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
|
||||||
|
Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
|
||||||
|
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
|
||||||
|
await buildImage({ applicationId, tag, workdir, docker, buildId, isCache: true, debug });
|
||||||
|
}
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const {
|
const {
|
||||||
@@ -42,9 +42,8 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
|
Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
|
||||||
Dockerfile.push(`RUN deno cache deps.ts`);
|
Dockerfile.push(`RUN deno cache deps.ts`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY ${denoMainFile} /app`);
|
|
||||||
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
|
||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
|
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
||||||
Dockerfile.push(`ENV NO_COLOR true`);
|
Dockerfile.push(`ENV NO_COLOR true`);
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
|
Dockerfile.push(`CMD deno run ${denoOptions ? denoOptions.split(' ') : ''} ${denoMainFile}`);
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildImage } from './common';
|
||||||
|
|
||||||
export default async function ({
|
export default async function ({
|
||||||
applicationId,
|
applicationId,
|
||||||
@@ -1,26 +1,26 @@
|
|||||||
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageWithNode, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, imageforBuild): Promise<void> => {
|
const createDockerfile = async (data, imageforBuild): Promise<void> => {
|
||||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
|
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
|
|
||||||
Dockerfile.push(`FROM ${imageforBuild}`);
|
Dockerfile.push(`FROM ${imageforBuild}`);
|
||||||
Dockerfile.push('WORKDIR /app');
|
Dockerfile.push('WORKDIR /app');
|
||||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||||
if (baseImage.includes('nginx')) {
|
if (baseImage?.includes('nginx')) {
|
||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
export default async function (data) {
|
export default async function (data) {
|
||||||
try {
|
try {
|
||||||
const { baseImage, baseBuildImage } = data;
|
const { baseImage, baseBuildImage } = data;
|
||||||
await buildCacheImageWithNode(data, baseImage);
|
await buildCacheImageWithNode(data, baseBuildImage);
|
||||||
await createDockerfile(data, baseBuildImage);
|
await createDockerfile(data, baseImage);
|
||||||
await buildImage(data);
|
await buildImage(data);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw error;
|
throw error;
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
import { buildCacheImageForLaravel, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageForLaravel, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const { workdir, applicationId, tag, buildId } = data;
|
const { workdir, applicationId, tag, buildId, port } = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
|
|
||||||
Dockerfile.push(`FROM ${image}`);
|
Dockerfile.push(`FROM ${image}`);
|
||||||
@@ -24,7 +24,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
||||||
);
|
);
|
||||||
Dockerfile.push(`COPY --chown=application:application . ./`);
|
Dockerfile.push(`COPY --chown=application:application . ./`);
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageWithNode, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
|
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
|
||||||
@@ -10,8 +10,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push('WORKDIR /app');
|
Dockerfile.push('WORKDIR /app');
|
||||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
if (isPnpm) {
|
if (isPnpm) {
|
||||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
|
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||||
Dockerfile.push('RUN pnpm add -g pnpm');
|
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);
|
||||||
|
|
||||||
@@ -1,18 +1,22 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
import { checkPnpm } from './common';
|
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const {
|
const {
|
||||||
|
applicationId,
|
||||||
buildId,
|
buildId,
|
||||||
|
tag,
|
||||||
workdir,
|
workdir,
|
||||||
|
publishDirectory,
|
||||||
port,
|
port,
|
||||||
installCommand,
|
installCommand,
|
||||||
buildCommand,
|
buildCommand,
|
||||||
startCommand,
|
startCommand,
|
||||||
baseDirectory,
|
baseDirectory,
|
||||||
secrets,
|
secrets,
|
||||||
pullmergeRequestId
|
pullmergeRequestId,
|
||||||
|
deploymentType,
|
||||||
|
baseImage
|
||||||
} = data;
|
} = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
||||||
@@ -35,25 +39,36 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (isPnpm) {
|
if (isPnpm) {
|
||||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
|
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||||
Dockerfile.push('RUN pnpm add -g pnpm');
|
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
if (deploymentType === 'node') {
|
||||||
Dockerfile.push(`RUN ${installCommand}`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
|
Dockerfile.push(`RUN ${installCommand}`);
|
||||||
if (buildCommand) {
|
|
||||||
Dockerfile.push(`RUN ${buildCommand}`);
|
Dockerfile.push(`RUN ${buildCommand}`);
|
||||||
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
|
Dockerfile.push(`CMD ${startCommand}`);
|
||||||
|
} else if (deploymentType === 'static') {
|
||||||
|
if (baseImage?.includes('nginx')) {
|
||||||
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
|
}
|
||||||
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||||
|
Dockerfile.push(`EXPOSE 80`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
|
||||||
Dockerfile.push(`CMD ${startCommand}`);
|
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
export default async function (data) {
|
export default async function (data) {
|
||||||
try {
|
try {
|
||||||
const { baseImage, baseBuildImage } = data;
|
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
|
||||||
await createDockerfile(data, baseImage);
|
if (deploymentType === 'node') {
|
||||||
await buildImage(data);
|
await createDockerfile(data, baseImage);
|
||||||
|
await buildImage(data);
|
||||||
|
} else if (deploymentType === 'static') {
|
||||||
|
if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
|
||||||
|
await createDockerfile(data, baseImage);
|
||||||
|
await buildImage(data);
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
@@ -1,6 +1,5 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
import { checkPnpm } from './common';
|
import { buildImage, checkPnpm } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const {
|
const {
|
||||||
@@ -36,8 +35,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (isPnpm) {
|
if (isPnpm) {
|
||||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
|
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||||
Dockerfile.push('RUN pnpm add -g pnpm');
|
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
Dockerfile.push(`RUN ${installCommand}`);
|
Dockerfile.push(`RUN ${installCommand}`);
|
||||||
@@ -51,7 +49,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
|
|
||||||
export default async function (data) {
|
export default async function (data) {
|
||||||
try {
|
try {
|
||||||
const { baseImage, baseBuildImage } = data;
|
const { baseImage } = data;
|
||||||
await createDockerfile(data, baseImage);
|
await createDockerfile(data, baseImage);
|
||||||
await buildImage(data);
|
await buildImage(data);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -1,10 +1,13 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
import { checkPnpm } from './common';
|
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const {
|
const {
|
||||||
|
applicationId,
|
||||||
|
buildId,
|
||||||
|
tag,
|
||||||
workdir,
|
workdir,
|
||||||
|
publishDirectory,
|
||||||
port,
|
port,
|
||||||
installCommand,
|
installCommand,
|
||||||
buildCommand,
|
buildCommand,
|
||||||
@@ -12,7 +15,8 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
baseDirectory,
|
baseDirectory,
|
||||||
secrets,
|
secrets,
|
||||||
pullmergeRequestId,
|
pullmergeRequestId,
|
||||||
buildId
|
deploymentType,
|
||||||
|
baseImage
|
||||||
} = data;
|
} = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
|
||||||
@@ -35,24 +39,36 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (isPnpm) {
|
if (isPnpm) {
|
||||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm');
|
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||||
Dockerfile.push('RUN pnpm add -g pnpm');
|
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
if (deploymentType === 'node') {
|
||||||
Dockerfile.push(`RUN ${installCommand}`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
if (buildCommand) {
|
Dockerfile.push(`RUN ${installCommand}`);
|
||||||
Dockerfile.push(`RUN ${buildCommand}`);
|
Dockerfile.push(`RUN ${buildCommand}`);
|
||||||
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
|
Dockerfile.push(`CMD ${startCommand}`);
|
||||||
|
} else if (deploymentType === 'static') {
|
||||||
|
if (baseImage?.includes('nginx')) {
|
||||||
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
|
}
|
||||||
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||||
|
Dockerfile.push(`EXPOSE 80`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
|
||||||
Dockerfile.push(`CMD ${startCommand}`);
|
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
export default async function (data) {
|
export default async function (data) {
|
||||||
try {
|
try {
|
||||||
const { baseImage, baseBuildImage } = data;
|
const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
|
||||||
await createDockerfile(data, baseImage);
|
if (deploymentType === 'node') {
|
||||||
await buildImage(data);
|
await createDockerfile(data, baseImage);
|
||||||
|
await buildImage(data);
|
||||||
|
} else if (deploymentType === 'static') {
|
||||||
|
if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
|
||||||
|
await createDockerfile(data, baseImage);
|
||||||
|
await buildImage(data);
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
|
const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
|
||||||
const { workdir, baseDirectory, buildId } = data;
|
const { workdir, baseDirectory, buildId, port, secrets, pullmergeRequestId } = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
let composerFound = false;
|
let composerFound = false;
|
||||||
try {
|
try {
|
||||||
@@ -12,6 +12,21 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
|
|||||||
|
|
||||||
Dockerfile.push(`FROM ${image}`);
|
Dockerfile.push(`FROM ${image}`);
|
||||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
|
if (secrets.length > 0) {
|
||||||
|
secrets.forEach((secret) => {
|
||||||
|
if (secret.isBuildSecret) {
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
if (secret.isPRMRSecret) {
|
||||||
|
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!secret.isPRMRSecret) {
|
||||||
|
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
Dockerfile.push('WORKDIR /app');
|
Dockerfile.push('WORKDIR /app');
|
||||||
Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
|
||||||
if (htaccessFound) {
|
if (htaccessFound) {
|
||||||
@@ -22,7 +37,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
|
Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const {
|
const {
|
||||||
@@ -34,6 +34,8 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
}
|
}
|
||||||
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
|
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
|
||||||
Dockerfile.push(`RUN pip install gunicorn`);
|
Dockerfile.push(`RUN pip install gunicorn`);
|
||||||
|
} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
|
||||||
|
Dockerfile.push(`RUN pip install uvicorn`);
|
||||||
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
|
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
|
||||||
Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
|
Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
|
||||||
// Dockerfile.push(`RUN pip install --no-cache-dir uwsgi`)
|
// Dockerfile.push(`RUN pip install --no-cache-dir uwsgi`)
|
||||||
@@ -50,6 +52,8 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
|
if (pythonWSGI?.toLowerCase() === 'gunicorn') {
|
||||||
Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
|
Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
|
||||||
|
} else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
|
||||||
|
Dockerfile.push(`CMD uvicorn ${pythonModule}:${pythonVariable} --port ${port} --host 0.0.0.0`);
|
||||||
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
|
} else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
|
||||||
Dockerfile.push(
|
Dockerfile.push(
|
||||||
`CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`
|
`CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`
|
||||||
@@ -1,18 +1,18 @@
|
|||||||
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageWithNode, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
|
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
|
|
||||||
Dockerfile.push(`FROM ${image}`);
|
Dockerfile.push(`FROM ${image}`);
|
||||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
Dockerfile.push('WORKDIR /app');
|
Dockerfile.push('WORKDIR /app');
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||||
if (baseImage.includes('nginx')) {
|
if (baseImage?.includes('nginx')) {
|
||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import { asyncExecShell } from '$lib/common';
|
|
||||||
import { buildCacheImageWithCargo, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
import TOML from '@iarna/toml';
|
import TOML from '@iarna/toml';
|
||||||
|
import { asyncExecShell } from '../common';
|
||||||
|
import { buildCacheImageWithCargo, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image, name): Promise<void> => {
|
const createDockerfile = async (data, image, name): Promise<void> => {
|
||||||
const { workdir, port, applicationId, tag, buildId } = data;
|
const { workdir, port, applicationId, tag, buildId } = data;
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageWithNode, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const {
|
const {
|
||||||
@@ -12,7 +12,8 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
secrets,
|
secrets,
|
||||||
pullmergeRequestId,
|
pullmergeRequestId,
|
||||||
baseImage,
|
baseImage,
|
||||||
buildId
|
buildId,
|
||||||
|
port
|
||||||
} = data;
|
} = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
|
|
||||||
@@ -39,10 +40,10 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
} else {
|
} else {
|
||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
}
|
}
|
||||||
if (baseImage.includes('nginx')) {
|
if (baseImage?.includes('nginx')) {
|
||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1,18 +1,18 @@
|
|||||||
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageWithNode, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
|
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
|
|
||||||
Dockerfile.push(`FROM ${image}`);
|
Dockerfile.push(`FROM ${image}`);
|
||||||
Dockerfile.push('WORKDIR /app');
|
Dockerfile.push('WORKDIR /app');
|
||||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||||
if (baseImage.includes('nginx')) {
|
if (baseImage?.includes('nginx')) {
|
||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1,18 +1,18 @@
|
|||||||
import { buildCacheImageWithNode, buildImage } from '$lib/docker';
|
|
||||||
import { promises as fs } from 'fs';
|
import { promises as fs } from 'fs';
|
||||||
|
import { buildCacheImageWithNode, buildImage } from './common';
|
||||||
|
|
||||||
const createDockerfile = async (data, image): Promise<void> => {
|
const createDockerfile = async (data, image): Promise<void> => {
|
||||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId } = data;
|
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||||
const Dockerfile: Array<string> = [];
|
const Dockerfile: Array<string> = [];
|
||||||
|
|
||||||
Dockerfile.push(`FROM ${image}`);
|
Dockerfile.push(`FROM ${image}`);
|
||||||
Dockerfile.push('WORKDIR /app');
|
Dockerfile.push('WORKDIR /app');
|
||||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||||
if (baseImage.includes('nginx')) {
|
if (baseImage?.includes('nginx')) {
|
||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|
||||||
1481
apps/api/src/lib/common.ts
Normal file
1481
apps/api/src/lib/common.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -4,4 +4,4 @@ import relativeTime from 'dayjs/plugin/relativeTime.js';
|
|||||||
dayjs.extend(utc);
|
dayjs.extend(utc);
|
||||||
dayjs.extend(relativeTime);
|
dayjs.extend(relativeTime);
|
||||||
|
|
||||||
export { dayjs };
|
export { dayjs as day };
|
||||||
78
apps/api/src/lib/docker.ts
Normal file
78
apps/api/src/lib/docker.ts
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import { asyncExecShell } from './common';
|
||||||
|
import Dockerode from 'dockerode';
|
||||||
|
export function getEngine(engine: string): string {
|
||||||
|
return engine === '/var/run/docker.sock' ? 'unix:///var/run/docker.sock' : engine;
|
||||||
|
}
|
||||||
|
export function dockerInstance({ destinationDocker }): { engine: Dockerode; network: string } {
|
||||||
|
return {
|
||||||
|
engine: new Dockerode({
|
||||||
|
socketPath: destinationDocker.engine
|
||||||
|
}),
|
||||||
|
network: destinationDocker.network
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function checkContainer(engine: string, container: string, remove = false): Promise<boolean> {
|
||||||
|
const host = getEngine(engine);
|
||||||
|
let containerFound = false;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { stdout } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker inspect --format '{{json .State}}' ${container}`
|
||||||
|
);
|
||||||
|
const parsedStdout = JSON.parse(stdout);
|
||||||
|
const status = parsedStdout.Status;
|
||||||
|
const isRunning = status === 'running';
|
||||||
|
if (status === 'created') {
|
||||||
|
await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
|
||||||
|
}
|
||||||
|
if (remove && status === 'exited') {
|
||||||
|
await asyncExecShell(`DOCKER_HOST="${host}" docker rm ${container}`);
|
||||||
|
}
|
||||||
|
if (isRunning) {
|
||||||
|
containerFound = true;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// Container not found
|
||||||
|
}
|
||||||
|
return containerFound;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function isContainerExited(engine: string, containerName: string): Promise<boolean> {
|
||||||
|
let isExited = false;
|
||||||
|
const host = getEngine(engine);
|
||||||
|
try {
|
||||||
|
const { stdout } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker inspect -f '{{.State.Status}}' ${containerName}`
|
||||||
|
);
|
||||||
|
if (stdout.trim() === 'exited') {
|
||||||
|
isExited = true;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
//
|
||||||
|
}
|
||||||
|
|
||||||
|
return isExited;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function removeContainer({
|
||||||
|
id,
|
||||||
|
engine
|
||||||
|
}: {
|
||||||
|
id: string;
|
||||||
|
engine: string;
|
||||||
|
}): Promise<void> {
|
||||||
|
const host = getEngine(engine);
|
||||||
|
try {
|
||||||
|
const { stdout } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST=${host} docker inspect --format '{{json .State}}' ${id}`
|
||||||
|
);
|
||||||
|
if (JSON.parse(stdout).Running) {
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker stop -t 0 ${id}`);
|
||||||
|
await asyncExecShell(`DOCKER_HOST=${host} docker rm ${id}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import { asyncExecShell, saveBuildLog } from '$lib/common';
|
|
||||||
import got from 'got';
|
|
||||||
import jsonwebtoken from 'jsonwebtoken';
|
import jsonwebtoken from 'jsonwebtoken';
|
||||||
import * as db from '$lib/database';
|
import { saveBuildLog } from '../buildPacks/common';
|
||||||
|
import { asyncExecShell, decrypt, prisma } from '../common';
|
||||||
|
|
||||||
export default async function ({
|
export default async function ({
|
||||||
applicationId,
|
applicationId,
|
||||||
@@ -11,7 +11,8 @@ export default async function ({
|
|||||||
apiUrl,
|
apiUrl,
|
||||||
htmlUrl,
|
htmlUrl,
|
||||||
branch,
|
branch,
|
||||||
buildId
|
buildId,
|
||||||
|
customPort
|
||||||
}: {
|
}: {
|
||||||
applicationId: string;
|
applicationId: string;
|
||||||
workdir: string;
|
workdir: string;
|
||||||
@@ -21,10 +22,16 @@ export default async function ({
|
|||||||
htmlUrl: string;
|
htmlUrl: string;
|
||||||
branch: string;
|
branch: string;
|
||||||
buildId: string;
|
buildId: string;
|
||||||
|
customPort: number;
|
||||||
}): Promise<string> {
|
}): Promise<string> {
|
||||||
|
const { default: got } = await import('got')
|
||||||
const url = htmlUrl.replace('https://', '').replace('http://', '');
|
const url = htmlUrl.replace('https://', '').replace('http://', '');
|
||||||
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
|
await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
|
||||||
const { privateKey, appId, installationId } = await db.getUniqueGithubApp({ githubAppId });
|
|
||||||
|
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
|
||||||
|
if (body.privateKey) body.privateKey = decrypt(body.privateKey);
|
||||||
|
const { privateKey, appId, installationId } = body
|
||||||
|
|
||||||
const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');
|
const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');
|
||||||
|
|
||||||
const payload = {
|
const payload = {
|
||||||
@@ -49,7 +56,7 @@ export default async function ({
|
|||||||
applicationId
|
applicationId
|
||||||
});
|
});
|
||||||
await asyncExecShell(
|
await asyncExecShell(
|
||||||
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
|
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
|
||||||
);
|
);
|
||||||
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
|
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
|
||||||
return commit.replace('\n', '');
|
return commit.replace('\n', '');
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import { asyncExecShell, saveBuildLog } from '$lib/common';
|
import { saveBuildLog } from "../buildPacks/common";
|
||||||
|
import { asyncExecShell } from "../common";
|
||||||
|
|
||||||
export default async function ({
|
export default async function ({
|
||||||
applicationId,
|
applicationId,
|
||||||
@@ -8,7 +9,8 @@ export default async function ({
|
|||||||
repository,
|
repository,
|
||||||
branch,
|
branch,
|
||||||
buildId,
|
buildId,
|
||||||
privateSshKey
|
privateSshKey,
|
||||||
|
customPort
|
||||||
}: {
|
}: {
|
||||||
applicationId: string;
|
applicationId: string;
|
||||||
workdir: string;
|
workdir: string;
|
||||||
@@ -18,6 +20,7 @@ export default async function ({
|
|||||||
buildId: string;
|
buildId: string;
|
||||||
repodir: string;
|
repodir: string;
|
||||||
privateSshKey: string;
|
privateSshKey: string;
|
||||||
|
customPort: number;
|
||||||
}): Promise<string> {
|
}): Promise<string> {
|
||||||
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
|
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
|
||||||
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
|
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
|
||||||
@@ -31,7 +34,7 @@ export default async function ({
|
|||||||
});
|
});
|
||||||
|
|
||||||
await asyncExecShell(
|
await asyncExecShell(
|
||||||
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
|
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
|
||||||
);
|
);
|
||||||
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
|
const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
|
||||||
return commit.replace('\n', '');
|
return commit.replace('\n', '');
|
||||||
52
apps/api/src/lib/scheduler.ts
Normal file
52
apps/api/src/lib/scheduler.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
import Bree from 'bree';
|
||||||
|
import path from 'path';
|
||||||
|
import Cabin from 'cabin';
|
||||||
|
import TSBree from '@breejs/ts-worker';
|
||||||
|
import { isDev } from './common';
|
||||||
|
|
||||||
|
Bree.extend(TSBree);
|
||||||
|
|
||||||
|
const options: any = {
|
||||||
|
defaultExtension: 'js',
|
||||||
|
logger: false,
|
||||||
|
workerMessageHandler: async ({ name, message }) => {
|
||||||
|
if (name === 'deployApplication') {
|
||||||
|
if (message.pending === 0 && message.size === 0) {
|
||||||
|
if (message.caller === 'autoUpdater') {
|
||||||
|
if (!scheduler.workers.has('autoUpdater')) {
|
||||||
|
await scheduler.stop('deployApplication');
|
||||||
|
await scheduler.run('autoUpdater')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (message.caller === 'cleanupStorage') {
|
||||||
|
if (!scheduler.workers.has('cleanupStorage')) {
|
||||||
|
await scheduler.stop('deployApplication');
|
||||||
|
await scheduler.run('cleanupStorage')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
jobs: [
|
||||||
|
{
|
||||||
|
name: 'deployApplication'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'cleanupStorage',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'checkProxies',
|
||||||
|
interval: '10s'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'autoUpdater',
|
||||||
|
}
|
||||||
|
],
|
||||||
|
};
|
||||||
|
if (isDev) options.root = path.join(__dirname, '../jobs');
|
||||||
|
|
||||||
|
|
||||||
|
export const scheduler = new Bree(options);
|
||||||
|
|
||||||
|
|
||||||
415
apps/api/src/lib/serviceFields.ts
Normal file
415
apps/api/src/lib/serviceFields.ts
Normal file
@@ -0,0 +1,415 @@
|
|||||||
|
// Example:
|
||||||
|
// export const nocodb = [{
|
||||||
|
// name: 'postgreslUser',
|
||||||
|
// isEditable: false,
|
||||||
|
// isLowerCase: false,
|
||||||
|
// isNumber: false,
|
||||||
|
// isBoolean: false,
|
||||||
|
// isEncrypted: false
|
||||||
|
// }]
|
||||||
|
|
||||||
|
export const plausibleAnalytics = [{
|
||||||
|
name: 'email',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},{
|
||||||
|
name: 'username',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'password',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlDatabase',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlPublicPort',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: true,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'secretKeyBase',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'scriptName',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
}]
|
||||||
|
export const minio = [{
|
||||||
|
name: 'apiFqdn',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},{
|
||||||
|
name: 'rootUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'rootUserPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
}]
|
||||||
|
export const vscodeserver = [{
|
||||||
|
name: 'password',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
}]
|
||||||
|
export const wordpress = [{
|
||||||
|
name: 'extraConfig',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlHost',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlPort',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: true,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlUser',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlPassword',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlRootUser',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlRootUserPassword',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mysqlDatabase',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'ftpPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
}]
|
||||||
|
export const ghost = [{
|
||||||
|
name: 'defaultEmail',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'defaultPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mariadbUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mariadbPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mariadbRootUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mariadbRootUserPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mariadbDatabase',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
}]
|
||||||
|
export const meiliSearch = [{
|
||||||
|
name: 'masterKey',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
}]
|
||||||
|
export const umami = [{
|
||||||
|
name: 'postgresqlUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlDatabase',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'umamiAdminPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'hashSalt',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
}]
|
||||||
|
export const hasura = [{
|
||||||
|
name: 'postgresqlUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlDatabase',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'graphQLAdminPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
}]
|
||||||
|
export const fider = [{
|
||||||
|
name: 'jwtSecret',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},{
|
||||||
|
name: 'postgreslUser',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'postgresqlPassword',
|
||||||
|
isEditable: false,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailNoreply',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailSmtpHost',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailSmtpPassword',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailSmtpPort',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: true,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailSmtpUser',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailSmtpEnableStartTls',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: true,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailMailgunApiKey',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailMailgunDomain',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: true,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'emailMailgunRegion',
|
||||||
|
isEditable: true,
|
||||||
|
isLowerCase: false,
|
||||||
|
isNumber: false,
|
||||||
|
isBoolean: false,
|
||||||
|
isEncrypted: false
|
||||||
|
}]
|
||||||
34
apps/api/src/plugins/jwt.ts
Normal file
34
apps/api/src/plugins/jwt.ts
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import fp from 'fastify-plugin'
|
||||||
|
import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt'
|
||||||
|
|
||||||
|
declare module "@fastify/jwt" {
|
||||||
|
interface FastifyJWT {
|
||||||
|
user: {
|
||||||
|
userId: string,
|
||||||
|
teamId: string,
|
||||||
|
permission: string,
|
||||||
|
isAdmin: boolean
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default fp<FastifyJWTOptions>(async (fastify, opts) => {
|
||||||
|
fastify.register(fastifyJwt, {
|
||||||
|
secret: fastify.config.COOLIFY_SECRET_KEY
|
||||||
|
})
|
||||||
|
|
||||||
|
fastify.decorate("authenticate", async function (request, reply) {
|
||||||
|
try {
|
||||||
|
await request.jwtVerify()
|
||||||
|
} catch (err) {
|
||||||
|
console.log(err)
|
||||||
|
reply.send(err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
declare module 'fastify' {
|
||||||
|
export interface FastifyInstance {
|
||||||
|
authenticate(): Promise<void>
|
||||||
|
}
|
||||||
|
}
|
||||||
907
apps/api/src/routes/api/v1/applications/handlers.ts
Normal file
907
apps/api/src/routes/api/v1/applications/handlers.ts
Normal file
@@ -0,0 +1,907 @@
|
|||||||
|
import cuid from 'cuid';
|
||||||
|
import crypto from 'node:crypto'
|
||||||
|
import jsonwebtoken from 'jsonwebtoken';
|
||||||
|
import axios from 'axios';
|
||||||
|
import { day } from '../../../../lib/dayjs';
|
||||||
|
|
||||||
|
|
||||||
|
import type { FastifyRequest } from 'fastify';
|
||||||
|
import { FastifyReply } from 'fastify';
|
||||||
|
|
||||||
|
import { CheckDNS, DeleteApplication, DeployApplication, GetApplication, SaveApplication, SaveApplicationSettings } from '.';
|
||||||
|
import { setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
|
||||||
|
import { asyncExecShell, checkDomainsIsValidInDNS, checkDoubleBranch, decrypt, encrypt, errorHandler, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, prisma, stopBuild, uniqueName } from '../../../../lib/common';
|
||||||
|
import { checkContainer, dockerInstance, getEngine, isContainerExited, removeContainer } from '../../../../lib/docker';
|
||||||
|
import { scheduler } from '../../../../lib/scheduler';
|
||||||
|
|
||||||
|
|
||||||
|
export async function listApplications(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { teamId } = request.user
|
||||||
|
const applications = await prisma.application.findMany({
|
||||||
|
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { teams: true }
|
||||||
|
});
|
||||||
|
const settings = await prisma.setting.findFirst()
|
||||||
|
return {
|
||||||
|
applications,
|
||||||
|
settings
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getImages(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { buildPack, deploymentType } = request.body
|
||||||
|
let publishDirectory = undefined;
|
||||||
|
let port = undefined
|
||||||
|
const { baseImage, baseBuildImage, baseBuildImages, baseImages, } = setDefaultBaseImage(
|
||||||
|
buildPack, deploymentType
|
||||||
|
);
|
||||||
|
if (buildPack === 'nextjs') {
|
||||||
|
if (deploymentType === 'static') {
|
||||||
|
publishDirectory = 'out'
|
||||||
|
port = '80'
|
||||||
|
} else {
|
||||||
|
publishDirectory = ''
|
||||||
|
port = '3000'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (buildPack === 'nuxtjs') {
|
||||||
|
if (deploymentType === 'static') {
|
||||||
|
publishDirectory = 'dist'
|
||||||
|
port = '80'
|
||||||
|
} else {
|
||||||
|
publishDirectory = ''
|
||||||
|
port = '3000'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
return { baseImage, baseBuildImage, baseBuildImages, baseImages, publishDirectory, port }
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getApplication(request: FastifyRequest<GetApplication>) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { teamId } = request.user
|
||||||
|
const appId = process.env['COOLIFY_APP_ID'];
|
||||||
|
let isRunning = false;
|
||||||
|
let isExited = false;
|
||||||
|
const application = await getApplicationFromDB(id, teamId);
|
||||||
|
if (application?.destinationDockerId && application.destinationDocker?.engine) {
|
||||||
|
isRunning = await checkContainer(application.destinationDocker.engine, id);
|
||||||
|
isExited = await isContainerExited(application.destinationDocker.engine, id);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
isQueueActive: scheduler.workers.has('deployApplication'),
|
||||||
|
isRunning,
|
||||||
|
isExited,
|
||||||
|
application,
|
||||||
|
appId
|
||||||
|
};
|
||||||
|
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function newApplication(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const name = uniqueName();
|
||||||
|
const { teamId } = request.user
|
||||||
|
const { id } = await prisma.application.create({
|
||||||
|
data: {
|
||||||
|
name,
|
||||||
|
teams: { connect: { id: teamId } },
|
||||||
|
settings: { create: { debug: false, previews: false } }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return reply.code(201).send({ id });
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function decryptApplication(application: any) {
|
||||||
|
if (application) {
|
||||||
|
if (application?.gitSource?.githubApp?.clientSecret) {
|
||||||
|
application.gitSource.githubApp.clientSecret = decrypt(application.gitSource.githubApp.clientSecret) || null;
|
||||||
|
}
|
||||||
|
if (application?.gitSource?.githubApp?.webhookSecret) {
|
||||||
|
application.gitSource.githubApp.webhookSecret = decrypt(application.gitSource.githubApp.webhookSecret) || null;
|
||||||
|
}
|
||||||
|
if (application?.gitSource?.githubApp?.privateKey) {
|
||||||
|
application.gitSource.githubApp.privateKey = decrypt(application.gitSource.githubApp.privateKey) || null;
|
||||||
|
}
|
||||||
|
if (application?.gitSource?.gitlabApp?.appSecret) {
|
||||||
|
application.gitSource.gitlabApp.appSecret = decrypt(application.gitSource.gitlabApp.appSecret) || null;
|
||||||
|
}
|
||||||
|
if (application?.secrets.length > 0) {
|
||||||
|
application.secrets = application.secrets.map((s: any) => {
|
||||||
|
s.value = decrypt(s.value) || null
|
||||||
|
return s;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return application;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getApplicationFromDB(id: string, teamId: string) {
|
||||||
|
try {
|
||||||
|
let application = await prisma.application.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: {
|
||||||
|
destinationDocker: true,
|
||||||
|
settings: true,
|
||||||
|
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||||
|
secrets: true,
|
||||||
|
persistentStorage: true
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (!application) {
|
||||||
|
throw { status: 404, message: 'Application not found.' };
|
||||||
|
}
|
||||||
|
application = decryptApplication(application);
|
||||||
|
const buildPack = application?.buildPack || null;
|
||||||
|
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
|
||||||
|
buildPack
|
||||||
|
);
|
||||||
|
|
||||||
|
// Set default build images
|
||||||
|
if (!application.baseImage) {
|
||||||
|
application.baseImage = baseImage;
|
||||||
|
}
|
||||||
|
if (!application.baseBuildImage) {
|
||||||
|
application.baseBuildImage = baseBuildImage;
|
||||||
|
}
|
||||||
|
return { ...application, baseBuildImages, baseImages };
|
||||||
|
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getApplicationFromDBWebhook(projectId: number, branch: string) {
|
||||||
|
try {
|
||||||
|
let application = await prisma.application.findFirst({
|
||||||
|
where: { projectId, branch, settings: { autodeploy: true } },
|
||||||
|
include: {
|
||||||
|
destinationDocker: true,
|
||||||
|
settings: true,
|
||||||
|
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||||
|
secrets: true,
|
||||||
|
persistentStorage: true
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (!application) {
|
||||||
|
throw { status: 500, message: 'Application not configured.' }
|
||||||
|
}
|
||||||
|
application = decryptApplication(application);
|
||||||
|
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
|
||||||
|
application.buildPack
|
||||||
|
);
|
||||||
|
|
||||||
|
// Set default build images
|
||||||
|
if (!application.baseImage) {
|
||||||
|
application.baseImage = baseImage;
|
||||||
|
}
|
||||||
|
if (!application.baseBuildImage) {
|
||||||
|
application.baseBuildImage = baseBuildImage;
|
||||||
|
}
|
||||||
|
return { ...application, baseBuildImages, baseImages };
|
||||||
|
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveApplication(request: FastifyRequest<SaveApplication>, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let {
|
||||||
|
name,
|
||||||
|
buildPack,
|
||||||
|
fqdn,
|
||||||
|
port,
|
||||||
|
exposePort,
|
||||||
|
installCommand,
|
||||||
|
buildCommand,
|
||||||
|
startCommand,
|
||||||
|
baseDirectory,
|
||||||
|
publishDirectory,
|
||||||
|
pythonWSGI,
|
||||||
|
pythonModule,
|
||||||
|
pythonVariable,
|
||||||
|
dockerFileLocation,
|
||||||
|
denoMainFile,
|
||||||
|
denoOptions,
|
||||||
|
baseImage,
|
||||||
|
baseBuildImage,
|
||||||
|
deploymentType
|
||||||
|
} = request.body
|
||||||
|
|
||||||
|
if (port) port = Number(port);
|
||||||
|
if (exposePort) {
|
||||||
|
exposePort = Number(exposePort);
|
||||||
|
}
|
||||||
|
if (denoOptions) denoOptions = denoOptions.trim();
|
||||||
|
const defaultConfiguration = await setDefaultConfiguration({
|
||||||
|
buildPack,
|
||||||
|
port,
|
||||||
|
installCommand,
|
||||||
|
startCommand,
|
||||||
|
buildCommand,
|
||||||
|
publishDirectory,
|
||||||
|
baseDirectory,
|
||||||
|
dockerFileLocation,
|
||||||
|
denoMainFile
|
||||||
|
});
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id },
|
||||||
|
data: {
|
||||||
|
name,
|
||||||
|
fqdn,
|
||||||
|
exposePort,
|
||||||
|
pythonWSGI,
|
||||||
|
pythonModule,
|
||||||
|
pythonVariable,
|
||||||
|
denoOptions,
|
||||||
|
baseImage,
|
||||||
|
baseBuildImage,
|
||||||
|
deploymentType,
|
||||||
|
...defaultConfiguration
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return reply.code(201).send();
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveApplicationSettings(request: FastifyRequest<SaveApplicationSettings>, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { debug, previews, dualCerts, autodeploy, branch, projectId } = request.body
|
||||||
|
const isDouble = await checkDoubleBranch(branch, projectId);
|
||||||
|
if (isDouble && autodeploy) {
|
||||||
|
throw { status: 500, message: 'Application not configured.' }
|
||||||
|
}
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id },
|
||||||
|
data: { settings: { update: { debug, previews, dualCerts, autodeploy } } },
|
||||||
|
include: { destinationDocker: true }
|
||||||
|
});
|
||||||
|
return reply.code(201).send();
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function stopApplication(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { teamId } = request.user
|
||||||
|
const application = await getApplicationFromDB(id, teamId);
|
||||||
|
if (application?.destinationDockerId && application.destinationDocker?.engine) {
|
||||||
|
const { engine } = application.destinationDocker;
|
||||||
|
const found = await checkContainer(engine, id);
|
||||||
|
if (found) {
|
||||||
|
await removeContainer({ id, engine });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return reply.code(201).send();
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function deleteApplication(request: FastifyRequest<DeleteApplication>, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { teamId } = request.user
|
||||||
|
const application = await prisma.application.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: { destinationDocker: true }
|
||||||
|
});
|
||||||
|
if (application?.destinationDockerId && application.destinationDocker?.engine && application.destinationDocker?.network) {
|
||||||
|
const host = getEngine(application.destinationDocker.engine);
|
||||||
|
const { stdout: containers } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST=${host} docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
|
||||||
|
);
|
||||||
|
if (containers) {
|
||||||
|
const containersArray = containers.trim().split('\n');
|
||||||
|
for (const container of containersArray) {
|
||||||
|
const containerObj = JSON.parse(container);
|
||||||
|
const id = containerObj.ID;
|
||||||
|
await removeContainer({ id, engine: application.destinationDocker.engine });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await prisma.applicationSettings.deleteMany({ where: { application: { id } } });
|
||||||
|
await prisma.buildLog.deleteMany({ where: { applicationId: id } });
|
||||||
|
await prisma.build.deleteMany({ where: { applicationId: id } });
|
||||||
|
await prisma.secret.deleteMany({ where: { applicationId: id } });
|
||||||
|
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
|
||||||
|
if (teamId === '0') {
|
||||||
|
await prisma.application.deleteMany({ where: { id } });
|
||||||
|
} else {
|
||||||
|
await prisma.application.deleteMany({ where: { id, teams: { some: { id: teamId } } } });
|
||||||
|
}
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function checkDNS(request: FastifyRequest<CheckDNS>) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
|
||||||
|
let { exposePort, fqdn, forceSave, dualCerts } = request.body
|
||||||
|
fqdn = fqdn.toLowerCase();
|
||||||
|
|
||||||
|
const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
|
||||||
|
const found = await isDomainConfigured({ id, fqdn });
|
||||||
|
if (found) {
|
||||||
|
throw { status: 500, message: `Domain ${getDomain(fqdn).replace('www.', '')} is already in use!` }
|
||||||
|
}
|
||||||
|
if (exposePort) {
|
||||||
|
exposePort = Number(exposePort);
|
||||||
|
|
||||||
|
if (exposePort < 1024 || exposePort > 65535) {
|
||||||
|
throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` }
|
||||||
|
}
|
||||||
|
const { default: getPort } = await import('get-port');
|
||||||
|
const publicPort = await getPort({ port: exposePort });
|
||||||
|
if (publicPort !== exposePort) {
|
||||||
|
throw { status: 500, message: `Port ${exposePort} is already in use.` }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (isDNSCheckEnabled && !isDev && !forceSave) {
|
||||||
|
return await checkDomainsIsValidInDNS({ hostname: request.hostname.split(':')[0], fqdn, dualCerts });
|
||||||
|
}
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getUsage(request) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const teamId = request.user?.teamId;
|
||||||
|
const application = await getApplicationFromDB(id, teamId);
|
||||||
|
let usage = {};
|
||||||
|
if (application.destinationDockerId) {
|
||||||
|
[usage] = await Promise.all([getContainerUsage(application.destinationDocker.engine, id)]);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
usage
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function deployApplication(request: FastifyRequest<DeployApplication>) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const teamId = request.user?.teamId;
|
||||||
|
|
||||||
|
const { pullmergeRequestId = null, branch } = request.body
|
||||||
|
const buildId = cuid();
|
||||||
|
const application = await getApplicationFromDB(id, teamId);
|
||||||
|
if (application) {
|
||||||
|
if (!application?.configHash) {
|
||||||
|
const configHash = crypto.createHash('sha256')
|
||||||
|
.update(
|
||||||
|
JSON.stringify({
|
||||||
|
buildPack: application.buildPack,
|
||||||
|
port: application.port,
|
||||||
|
exposePort: application.exposePort,
|
||||||
|
installCommand: application.installCommand,
|
||||||
|
buildCommand: application.buildCommand,
|
||||||
|
startCommand: application.startCommand
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.digest('hex');
|
||||||
|
await prisma.application.update({ where: { id }, data: { configHash } });
|
||||||
|
}
|
||||||
|
await prisma.application.update({ where: { id }, data: { updatedAt: new Date() } });
|
||||||
|
await prisma.build.create({
|
||||||
|
data: {
|
||||||
|
id: buildId,
|
||||||
|
applicationId: id,
|
||||||
|
branch: application.branch,
|
||||||
|
destinationDockerId: application.destinationDocker?.id,
|
||||||
|
gitSourceId: application.gitSource?.id,
|
||||||
|
githubAppId: application.gitSource?.githubApp?.id,
|
||||||
|
gitlabAppId: application.gitSource?.gitlabApp?.id,
|
||||||
|
status: 'queued',
|
||||||
|
type: 'manual'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (pullmergeRequestId) {
|
||||||
|
scheduler.workers.get('deployApplication').postMessage({
|
||||||
|
build_id: buildId,
|
||||||
|
type: 'manual',
|
||||||
|
...application,
|
||||||
|
sourceBranch: branch,
|
||||||
|
pullmergeRequestId
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
scheduler.workers.get('deployApplication').postMessage({
|
||||||
|
build_id: buildId,
|
||||||
|
type: 'manual',
|
||||||
|
...application
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
buildId
|
||||||
|
};
|
||||||
|
}
|
||||||
|
throw { status: 500, message: 'Application not found!' }
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
export async function saveApplicationSource(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { gitSourceId } = request.body
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id },
|
||||||
|
data: { gitSource: { connect: { id: gitSourceId } } }
|
||||||
|
});
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getGitHubToken(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { teamId } = request.user
|
||||||
|
const application = await getApplicationFromDB(id, teamId);
|
||||||
|
const payload = {
|
||||||
|
iat: Math.round(new Date().getTime() / 1000),
|
||||||
|
exp: Math.round(new Date().getTime() / 1000 + 60),
|
||||||
|
iss: application.gitSource.githubApp.appId
|
||||||
|
};
|
||||||
|
const githubToken = jsonwebtoken.sign(payload, application.gitSource.githubApp.privateKey, {
|
||||||
|
algorithm: 'RS256'
|
||||||
|
});
|
||||||
|
const { data } = await axios.post(`${application.gitSource.apiUrl}/app/installations/${application.gitSource.githubApp.installationId}/access_tokens`, {}, {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${githubToken}`
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return reply.code(201).send({
|
||||||
|
token: data.token
|
||||||
|
})
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function checkRepository(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { repository, branch } = request.query
|
||||||
|
const application = await prisma.application.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: { gitSource: true }
|
||||||
|
});
|
||||||
|
const found = await prisma.application.findFirst({
|
||||||
|
where: { branch, repository, gitSource: { type: application.gitSource.type }, id: { not: id } }
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
used: found ? true : false
|
||||||
|
};
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveRepository(request, reply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let { repository, branch, projectId, autodeploy, webhookToken } = request.body
|
||||||
|
|
||||||
|
repository = repository.toLowerCase();
|
||||||
|
branch = branch.toLowerCase();
|
||||||
|
projectId = Number(projectId);
|
||||||
|
if (webhookToken) {
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id },
|
||||||
|
data: { repository, branch, projectId, gitSource: { update: { gitlabApp: { update: { webhookToken: webhookToken ? webhookToken : undefined } } } }, settings: { update: { autodeploy } } }
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id },
|
||||||
|
data: { repository, branch, projectId, settings: { update: { autodeploy } } }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveDestination(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { destinationId } = request.body
|
||||||
|
await prisma.application.update({
|
||||||
|
where: { id },
|
||||||
|
data: { destinationDocker: { connect: { id: destinationId } } }
|
||||||
|
});
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getBuildPack(request) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const teamId = request.user?.teamId;
|
||||||
|
const application = await getApplicationFromDB(id, teamId);
|
||||||
|
return {
|
||||||
|
type: application.gitSource.type,
|
||||||
|
projectId: application.projectId,
|
||||||
|
repository: application.repository,
|
||||||
|
branch: application.branch,
|
||||||
|
apiUrl: application.gitSource.apiUrl
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveBuildPack(request, reply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { buildPack } = request.body
|
||||||
|
await prisma.application.update({ where: { id }, data: { buildPack } });
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getSecrets(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let secrets = await prisma.secret.findMany({
|
||||||
|
where: { applicationId: id },
|
||||||
|
orderBy: { createdAt: 'desc' }
|
||||||
|
});
|
||||||
|
secrets = secrets.map((secret) => {
|
||||||
|
secret.value = decrypt(secret.value);
|
||||||
|
return secret;
|
||||||
|
});
|
||||||
|
secrets = secrets.filter((secret) => !secret.isPRMRSecret).sort((a, b) => {
|
||||||
|
return ('' + a.name).localeCompare(b.name);
|
||||||
|
})
|
||||||
|
return {
|
||||||
|
secrets
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveSecret(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body
|
||||||
|
|
||||||
|
if (isNew) {
|
||||||
|
const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } });
|
||||||
|
if (found) {
|
||||||
|
throw { status: 500, message: `Secret ${name} already exists.` }
|
||||||
|
} else {
|
||||||
|
value = encrypt(value);
|
||||||
|
await prisma.secret.create({
|
||||||
|
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
value = encrypt(value);
|
||||||
|
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
|
||||||
|
|
||||||
|
if (found) {
|
||||||
|
await prisma.secret.updateMany({
|
||||||
|
where: { applicationId: id, name, isPRMRSecret },
|
||||||
|
data: { value, isBuildSecret, isPRMRSecret }
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await prisma.secret.create({
|
||||||
|
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function deleteSecret(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { name } = request.body
|
||||||
|
await prisma.secret.deleteMany({ where: { applicationId: id, name } });
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getStorages(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const persistentStorages = await prisma.applicationPersistentStorage.findMany({ where: { applicationId: id } });
|
||||||
|
return {
|
||||||
|
persistentStorages
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveStorage(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { path, newStorage, storageId } = request.body
|
||||||
|
|
||||||
|
if (newStorage) {
|
||||||
|
await prisma.applicationPersistentStorage.create({
|
||||||
|
data: { path, application: { connect: { id } } }
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await prisma.applicationPersistentStorage.update({
|
||||||
|
where: { id: storageId },
|
||||||
|
data: { path }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteStorage(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { path } = request.body
|
||||||
|
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id, path } });
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getPreviews(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { teamId } = request.user
|
||||||
|
let secrets = await prisma.secret.findMany({
|
||||||
|
where: { applicationId: id },
|
||||||
|
orderBy: { createdAt: 'desc' }
|
||||||
|
});
|
||||||
|
secrets = secrets.map((secret) => {
|
||||||
|
secret.value = decrypt(secret.value);
|
||||||
|
return secret;
|
||||||
|
});
|
||||||
|
const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret);
|
||||||
|
const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret);
|
||||||
|
const destinationDocker = await prisma.destinationDocker.findFirst({
|
||||||
|
where: { application: { some: { id } }, teams: { some: { id: teamId } } }
|
||||||
|
});
|
||||||
|
const docker = dockerInstance({ destinationDocker });
|
||||||
|
const listContainers = await docker.engine.listContainers({
|
||||||
|
filters: { network: [destinationDocker.network], name: [id] }
|
||||||
|
});
|
||||||
|
const containers = listContainers.filter((container) => {
|
||||||
|
return (
|
||||||
|
container.Labels['coolify.configuration'] &&
|
||||||
|
container.Labels['coolify.type'] === 'standalone-application'
|
||||||
|
);
|
||||||
|
});
|
||||||
|
const jsonContainers = containers
|
||||||
|
.map((container) =>
|
||||||
|
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
|
||||||
|
)
|
||||||
|
.filter((container) => {
|
||||||
|
return container.pullmergeRequestId && container.applicationId === id;
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
containers: jsonContainers,
|
||||||
|
applicationSecrets: applicationSecrets.sort((a, b) => {
|
||||||
|
return ('' + a.name).localeCompare(b.name);
|
||||||
|
}),
|
||||||
|
PRMRSecrets: PRMRSecrets.sort((a, b) => {
|
||||||
|
return ('' + a.name).localeCompare(b.name);
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getApplicationLogs(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let { since = 0 } = request.query
|
||||||
|
if (since !== 0) {
|
||||||
|
since = day(since).unix();
|
||||||
|
}
|
||||||
|
const { destinationDockerId, destinationDocker } = await prisma.application.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: { destinationDocker: true }
|
||||||
|
});
|
||||||
|
if (destinationDockerId) {
|
||||||
|
const docker = dockerInstance({ destinationDocker });
|
||||||
|
try {
|
||||||
|
const container = await docker.engine.getContainer(id);
|
||||||
|
if (container) {
|
||||||
|
const { default: ansi } = await import('strip-ansi')
|
||||||
|
const logs = (
|
||||||
|
await container.logs({
|
||||||
|
stdout: true,
|
||||||
|
stderr: true,
|
||||||
|
timestamps: true,
|
||||||
|
since,
|
||||||
|
tail: 5000
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.toString()
|
||||||
|
.split('\n')
|
||||||
|
.map((l) => ansi(l.slice(8)))
|
||||||
|
.filter((a) => a);
|
||||||
|
return {
|
||||||
|
logs
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
logs: []
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getBuildLogs(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let { buildId, skip = 0 } = request.query
|
||||||
|
if (typeof skip !== 'number') {
|
||||||
|
skip = Number(skip)
|
||||||
|
}
|
||||||
|
|
||||||
|
let builds = [];
|
||||||
|
|
||||||
|
const buildCount = await prisma.build.count({ where: { applicationId: id } });
|
||||||
|
if (buildId) {
|
||||||
|
builds = await prisma.build.findMany({ where: { applicationId: id, id: buildId } });
|
||||||
|
} else {
|
||||||
|
builds = await prisma.build.findMany({
|
||||||
|
where: { applicationId: id },
|
||||||
|
orderBy: { createdAt: 'desc' },
|
||||||
|
take: 5,
|
||||||
|
skip
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
builds = builds.map((build) => {
|
||||||
|
const updatedAt = day(build.updatedAt).utc();
|
||||||
|
build.took = updatedAt.diff(day(build.createdAt)) / 1000;
|
||||||
|
build.since = updatedAt.fromNow();
|
||||||
|
return build;
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
builds,
|
||||||
|
buildCount
|
||||||
|
};
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getBuildIdLogs(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id, buildId } = request.params
|
||||||
|
let { sequence = 0 } = request.query
|
||||||
|
if (typeof sequence !== 'number') {
|
||||||
|
sequence = Number(sequence)
|
||||||
|
}
|
||||||
|
let logs = await prisma.buildLog.findMany({
|
||||||
|
where: { buildId, time: { gt: sequence } },
|
||||||
|
orderBy: { time: 'asc' }
|
||||||
|
});
|
||||||
|
const data = await prisma.build.findFirst({ where: { id: buildId } });
|
||||||
|
return {
|
||||||
|
logs,
|
||||||
|
status: data?.status || 'queued'
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getGitLabSSHKey(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const application = await prisma.application.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: { gitSource: { include: { gitlabApp: true } } }
|
||||||
|
});
|
||||||
|
return { publicKey: application.gitSource.gitlabApp.publicSshKey };
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveGitLabSSHKey(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const application = await prisma.application.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: { gitSource: { include: { gitlabApp: true } } }
|
||||||
|
});
|
||||||
|
if (!application.gitSource?.gitlabApp?.privateSshKey) {
|
||||||
|
const keys = await generateSshKeyPair();
|
||||||
|
const encryptedPrivateKey = encrypt(keys.privateKey);
|
||||||
|
await prisma.gitlabApp.update({
|
||||||
|
where: { id: application.gitSource.gitlabApp.id },
|
||||||
|
data: { privateSshKey: encryptedPrivateKey, publicSshKey: keys.publicKey }
|
||||||
|
});
|
||||||
|
return reply.code(201).send({ publicKey: keys.publicKey })
|
||||||
|
}
|
||||||
|
return { message: 'SSH key already exists' }
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveDeployKey(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
let { deployKeyId } = request.body;
|
||||||
|
|
||||||
|
deployKeyId = Number(deployKeyId);
|
||||||
|
const application = await prisma.application.findUnique({
|
||||||
|
where: { id },
|
||||||
|
include: { gitSource: { include: { gitlabApp: true } } }
|
||||||
|
});
|
||||||
|
await prisma.gitlabApp.update({
|
||||||
|
where: { id: application.gitSource.gitlabApp.id },
|
||||||
|
data: { deployKeyId }
|
||||||
|
});
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function cancelDeployment(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const { buildId, applicationId } = request.body;
|
||||||
|
if (!buildId) {
|
||||||
|
throw { status: 500, message: 'buildId is required' }
|
||||||
|
|
||||||
|
}
|
||||||
|
await stopBuild(buildId, applicationId);
|
||||||
|
return reply.code(201).send()
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
90
apps/api/src/routes/api/v1/applications/index.ts
Normal file
90
apps/api/src/routes/api/v1/applications/index.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import { FastifyPluginAsync } from 'fastify';
import { cancelDeployment, checkDNS, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication } from './handlers';

// Route generics: Params/Querystring/Body shapes used by the typed endpoints below.
export interface GetApplication {
    Params: { id: string; }
}

export interface SaveApplication {
    Params: { id: string; },
    Body: any
}

export interface SaveApplicationSettings {
    Params: { id: string; };
    Querystring: { domain: string; };
    Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; };
}

export interface DeleteApplication {
    Params: { id: string; };
    Querystring: { domain: string; };
}

export interface CheckDNS {
    Params: { id: string; };
    Querystring: { domain: string; };
}

export interface DeployApplication {
    Params: { id: string },
    Querystring: { domain: string }
    Body: { pullmergeRequestId: string | null, branch: string }
}

// Fastify plugin registering all /applications routes.
// Every route requires a valid JWT (enforced by the onRequest hook).
const root: FastifyPluginAsync = async (fastify, opts): Promise<void> => {
    fastify.addHook('onRequest', async (request, reply) => {
        return await request.jwtVerify()
    })
    fastify.get('/', async (request) => await listApplications(request));
    fastify.post('/images', async (request) => await getImages(request));

    fastify.post('/new', async (request, reply) => await newApplication(request, reply));

    // CRUD on a single application.
    fastify.get<GetApplication>('/:id', async (request) => await getApplication(request));
    fastify.post<SaveApplication>('/:id', async (request, reply) => await saveApplication(request, reply));
    fastify.delete<DeleteApplication>('/:id', async (request, reply) => await deleteApplication(request, reply));

    fastify.post('/:id/stop', async (request, reply) => await stopApplication(request, reply));

    fastify.post<SaveApplicationSettings>('/:id/settings', async (request, reply) => await saveApplicationSettings(request, reply));
    fastify.post<SaveApplicationSettings>('/:id/check', async (request) => await checkDNS(request));

    // Secrets management.
    fastify.get('/:id/secrets', async (request) => await getSecrets(request));
    fastify.post('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
    fastify.delete('/:id/secrets', async (request) => await deleteSecret(request));

    // Persistent storage management.
    fastify.get('/:id/storages', async (request) => await getStorages(request));
    fastify.post('/:id/storages', async (request, reply) => await saveStorage(request, reply));
    fastify.delete('/:id/storages', async (request) => await deleteStorage(request));

    fastify.get('/:id/previews', async (request) => await getPreviews(request));

    // Runtime and build logs.
    fastify.get('/:id/logs', async (request) => await getApplicationLogs(request));
    fastify.get('/:id/logs/build', async (request) => await getBuildLogs(request));
    fastify.get('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));

    fastify.get<DeployApplication>('/:id/usage', async (request) => await getUsage(request))

    // Deployment lifecycle.
    fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
    fastify.post('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));

    // Configuration wizard endpoints.
    fastify.post('/:id/configuration/source', async (request, reply) => await saveApplicationSource(request, reply));

    fastify.get('/:id/configuration/repository', async (request) => await checkRepository(request));
    fastify.post('/:id/configuration/repository', async (request, reply) => await saveRepository(request, reply));
    fastify.post('/:id/configuration/destination', async (request, reply) => await saveDestination(request, reply));
    fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
    fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));

    fastify.get('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
    fastify.post('/:id/configuration/sshkey', async (request, reply) => await saveGitLabSSHKey(request, reply));

    fastify.post('/:id/configuration/deploykey', async (request, reply) => await saveDeployKey(request, reply));

    fastify.get('/:id/configuration/githubToken', async (request, reply) => await getGitHubToken(request, reply));
};

export default root;
|
||||||
19
apps/api/src/routes/api/v1/base/index.ts
Normal file
19
apps/api/src/routes/api/v1/base/index.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { FastifyPluginAsync } from 'fastify';
import { errorHandler, version } from '../../../../lib/common';

// Base API plugin: reports the running version and white-label branding
// configuration (read from environment variables).
const root: FastifyPluginAsync = async (fastify, opts): Promise<void> => {
    fastify.get('/', async (request) => {
        try {
            return {
                version,
                whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
                whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
            }
        } catch ({ status, message }) {
            return errorHandler({ status, message })
        }
    });

};

export default root;
|
||||||
471
apps/api/src/routes/api/v1/databases/handlers.ts
Normal file
471
apps/api/src/routes/api/v1/databases/handlers.ts
Normal file
@@ -0,0 +1,471 @@
|
|||||||
|
import cuid from 'cuid';
|
||||||
|
import type { FastifyRequest } from 'fastify';
|
||||||
|
import { FastifyReply } from 'fastify';
|
||||||
|
import yaml from 'js-yaml';
|
||||||
|
import fs from 'fs/promises';
|
||||||
|
import { asyncExecShell, ComposeFile, createDirectories, decrypt, encrypt, errorHandler, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePort, listSettings, makeLabelForStandaloneDatabase, prisma, startTcpProxy, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
|
||||||
|
import { dockerInstance, getEngine } from '../../../../lib/docker';
|
||||||
|
import { day } from '../../../../lib/dayjs';
|
||||||
|
|
||||||
|
export async function listDatabases(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const userId = request.user.userId;
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
let databases = []
|
||||||
|
if (teamId === '0') {
|
||||||
|
databases = await prisma.database.findMany({ include: { teams: true } });
|
||||||
|
} else {
|
||||||
|
databases = await prisma.database.findMany({
|
||||||
|
where: { teams: { some: { id: teamId } } },
|
||||||
|
include: { teams: true }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
databases
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function newDatabase(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
|
||||||
|
const name = uniqueName();
|
||||||
|
const dbUser = cuid();
|
||||||
|
const dbUserPassword = encrypt(generatePassword());
|
||||||
|
const rootUser = cuid();
|
||||||
|
const rootUserPassword = encrypt(generatePassword());
|
||||||
|
const defaultDatabase = cuid();
|
||||||
|
|
||||||
|
const { id } = await prisma.database.create({
|
||||||
|
data: {
|
||||||
|
name,
|
||||||
|
defaultDatabase,
|
||||||
|
dbUser,
|
||||||
|
dbUserPassword,
|
||||||
|
rootUser,
|
||||||
|
rootUserPassword,
|
||||||
|
teams: { connect: { id: teamId } },
|
||||||
|
settings: { create: { isPublic: false } }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return reply.code(201).send({ id })
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getDatabase(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params;
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
const database = await prisma.database.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { destinationDocker: true, settings: true }
|
||||||
|
});
|
||||||
|
if (!database) {
|
||||||
|
throw { status: 404, message: 'Database not found.' }
|
||||||
|
}
|
||||||
|
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||||
|
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||||
|
const { destinationDockerId, destinationDocker } = database;
|
||||||
|
let isRunning = false;
|
||||||
|
if (destinationDockerId) {
|
||||||
|
const host = getEngine(destinationDocker.engine);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { stdout } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST=${host} docker inspect --format '{{json .State}}' ${id}`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (JSON.parse(stdout).Running) {
|
||||||
|
isRunning = true;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
//
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const configuration = generateDatabaseConfiguration(database);
|
||||||
|
const settings = await listSettings();
|
||||||
|
return {
|
||||||
|
privatePort: configuration?.privatePort,
|
||||||
|
database,
|
||||||
|
isRunning,
|
||||||
|
versions: await getDatabaseVersions(database.type),
|
||||||
|
settings
|
||||||
|
};
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getDatabaseTypes(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
return {
|
||||||
|
types: supportedDatabaseTypesAndVersions
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveDatabaseType(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params;
|
||||||
|
const { type } = request.body;
|
||||||
|
await prisma.database.update({
|
||||||
|
where: { id },
|
||||||
|
data: { type }
|
||||||
|
});
|
||||||
|
return reply.code(201).send({})
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getVersions(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
const { id } = request.params;
|
||||||
|
const { type } = await prisma.database.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { destinationDocker: true, settings: true }
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
versions: supportedDatabaseTypesAndVersions.find((name) => name.name === type).versions
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveVersion(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params;
|
||||||
|
const { version } = request.body;
|
||||||
|
|
||||||
|
await prisma.database.update({
|
||||||
|
where: { id },
|
||||||
|
data: {
|
||||||
|
version,
|
||||||
|
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return reply.code(201).send({})
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveDatabaseDestination(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params;
|
||||||
|
const { destinationId } = request.body;
|
||||||
|
|
||||||
|
await prisma.database.update({
|
||||||
|
where: { id },
|
||||||
|
data: { destinationDocker: { connect: { id: destinationId } } }
|
||||||
|
});
|
||||||
|
|
||||||
|
const {
|
||||||
|
destinationDockerId,
|
||||||
|
destinationDocker: { engine },
|
||||||
|
version,
|
||||||
|
type
|
||||||
|
} = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } });
|
||||||
|
|
||||||
|
if (destinationDockerId) {
|
||||||
|
const host = getEngine(engine);
|
||||||
|
if (type && version) {
|
||||||
|
const baseImage = getDatabaseImage(type);
|
||||||
|
asyncExecShell(`DOCKER_HOST=${host} docker pull ${baseImage}:${version}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return reply.code(201).send({})
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function getDatabaseUsage(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params;
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
let usage = {};
|
||||||
|
|
||||||
|
const database = await prisma.database.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { destinationDocker: true, settings: true }
|
||||||
|
});
|
||||||
|
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||||
|
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||||
|
if (database.destinationDockerId) {
|
||||||
|
[usage] = await Promise.all([getContainerUsage(database.destinationDocker.engine, id)]);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
usage
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
 * Starts a database container: generates a docker-compose file on disk,
 * creates the backing volume, brings the service up, and (when public)
 * starts a TCP proxy in front of it.
 */
export async function startDatabase(request: FastifyRequest) {
	try {
		const teamId = request.user.teamId;
		const { id } = request.params;

		const database = await prisma.database.findFirst({
			where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
			include: { destinationDocker: true, settings: true }
		});
		// Decrypt stored credentials; generateDatabaseConfiguration below needs
		// the plaintext values to build the container environment.
		if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
		if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
		const {
			type,
			destinationDockerId,
			destinationDocker,
			publicPort,
			settings: { isPublic }
		} = database;
		const { privatePort, environmentVariables, image, volume, ulimits } =
			generateDatabaseConfiguration(database);

		const network = destinationDockerId && destinationDocker.network;
		const host = getEngine(destinationDocker.engine);
		// NOTE(review): `engine` is computed but never used below — confirm and remove.
		const engine = destinationDocker.engine;
		// `volume` is "<name>:<mountpoint>"; the name is needed for `docker volume create`.
		const volumeName = volume.split(':')[0];
		const labels = await makeLabelForStandaloneDatabase({ id, image, volume });

		const { workdir } = await createDirectories({ repository: type, buildId: id });

		// Compose file uses the container id as both service and container name so
		// other handlers (logs/inspect/stop) can address the container by id.
		const composeFile: ComposeFile = {
			version: '3.8',
			services: {
				[id]: {
					container_name: id,
					image,
					networks: [network],
					environment: environmentVariables,
					volumes: [volume],
					ulimits,
					labels,
					restart: 'always',
					deploy: {
						restart_policy: {
							condition: 'on-failure',
							delay: '5s',
							max_attempts: 3,
							window: '120s'
						}
					}
				}
			},
			// Network and volume are managed outside this compose file.
			networks: {
				[network]: {
					external: true
				}
			},
			volumes: {
				[volumeName]: {
					external: true
				}
			}
		};
		const composeFileDestination = `${workdir}/docker-compose.yaml`;
		await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
		try {
			// Best effort: creating an already-existing volume fails harmlessly.
			await asyncExecShell(`DOCKER_HOST=${host} docker volume create ${volumeName}`);
		} catch (error) {
			console.log(error);
		}
		try {
			await asyncExecShell(`DOCKER_HOST=${host} docker compose -f ${composeFileDestination} up -d`);
			// Publicly exposed databases get a TCP proxy from publicPort to the
			// container's private port.
			if (isPublic) await startTcpProxy(destinationDocker, id, publicPort, privatePort);
			return {};
		} catch (error) {
			throw {
				error
			};
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
|
||||||
|
export async function stopDatabase(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
const { id } = request.params;
|
||||||
|
const database = await prisma.database.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { destinationDocker: true, settings: true }
|
||||||
|
});
|
||||||
|
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||||
|
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||||
|
const everStarted = await stopDatabaseContainer(database);
|
||||||
|
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
|
||||||
|
await prisma.database.update({
|
||||||
|
where: { id },
|
||||||
|
data: {
|
||||||
|
settings: { upsert: { update: { isPublic: false }, create: { isPublic: false } } }
|
||||||
|
}
|
||||||
|
});
|
||||||
|
await prisma.database.update({ where: { id }, data: { publicPort: null } });
|
||||||
|
return {};
|
||||||
|
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
 * Streams the last 5000 log lines of a database container, optionally only
 * those newer than the `since` query parameter.
 */
export async function getDatabaseLogs(request: FastifyRequest) {
	try {
		// NOTE(review): teamId is read but never used for authorization here —
		// the lookup below is by id only; confirm whether that is intended.
		const teamId = request.user.teamId;
		const { id } = request.params;
		let { since = 0 } = request.query
		if (since !== 0) {
			// Docker expects a unix timestamp; convert whatever format came in.
			since = day(since).unix();
		}
		const { destinationDockerId, destinationDocker } = await prisma.database.findUnique({
			where: { id },
			include: { destinationDocker: true }
		});
		if (destinationDockerId) {
			const docker = dockerInstance({ destinationDocker });
			try {
				const container = await docker.engine.getContainer(id);
				if (container) {
					// strip-ansi is ESM-only, hence the dynamic import.
					const { default: ansi } = await import('strip-ansi')
					const logs = (
						await container.logs({
							stdout: true,
							stderr: true,
							timestamps: true,
							since,
							tail: 5000
						})
					)
						.toString()
						.split('\n')
						// slice(8) drops the 8-byte header Docker prefixes to each
						// multiplexed log line; ansi() strips color codes.
						.map((l) => ansi(l.slice(8)))
						.filter((a) => a);
					return {
						logs
					};
				}
			} catch (error) {
				// A 404 from the Docker API means the container does not exist
				// (yet) — report empty logs instead of an error.
				const { statusCode } = error;
				if (statusCode === 404) {
					return {
						logs: []
					};
				}
			}
		}
		return {
			message: 'No logs found.'
		}
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
|
||||||
|
export async function deleteDatabase(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
const { id } = request.params;
|
||||||
|
const database = await prisma.database.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { destinationDocker: true, settings: true }
|
||||||
|
});
|
||||||
|
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||||
|
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||||
|
if (database.destinationDockerId) {
|
||||||
|
const everStarted = await stopDatabaseContainer(database);
|
||||||
|
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
|
||||||
|
}
|
||||||
|
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
|
||||||
|
await prisma.database.delete({ where: { id } });
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveDatabase(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
const { id } = request.params;
|
||||||
|
const {
|
||||||
|
name,
|
||||||
|
defaultDatabase,
|
||||||
|
dbUser,
|
||||||
|
dbUserPassword,
|
||||||
|
rootUser,
|
||||||
|
rootUserPassword,
|
||||||
|
version,
|
||||||
|
isRunning
|
||||||
|
} = request.body;
|
||||||
|
const database = await prisma.database.findFirst({
|
||||||
|
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||||
|
include: { destinationDocker: true, settings: true }
|
||||||
|
});
|
||||||
|
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||||
|
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||||
|
if (isRunning) {
|
||||||
|
if (database.dbUserPassword !== dbUserPassword) {
|
||||||
|
await updatePasswordInDb(database, dbUser, dbUserPassword, false);
|
||||||
|
} else if (database.rootUserPassword !== rootUserPassword) {
|
||||||
|
await updatePasswordInDb(database, rootUser, rootUserPassword, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const encryptedDbUserPassword = dbUserPassword && encrypt(dbUserPassword);
|
||||||
|
const encryptedRootUserPassword = rootUserPassword && encrypt(rootUserPassword);
|
||||||
|
await prisma.database.update({
|
||||||
|
where: { id },
|
||||||
|
data: {
|
||||||
|
name,
|
||||||
|
defaultDatabase,
|
||||||
|
dbUser,
|
||||||
|
dbUserPassword: encryptedDbUserPassword,
|
||||||
|
rootUser,
|
||||||
|
rootUserPassword: encryptedRootUserPassword,
|
||||||
|
version
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return reply.code(201).send({})
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
 * Toggles public exposure (and Redis append-only mode) for a database and
 * starts/stops the matching TCP proxy. Returns the allocated public port.
 */
export async function saveDatabaseSettings(request: FastifyRequest) {
	try {
		const teamId = request.user.teamId;
		const { id } = request.params;
		const { isPublic, appendOnly = true } = request.body;
		// NOTE(review): a free port is allocated on every call, even when the
		// database is being made private — confirm whether that is intended.
		const publicPort = await getFreePort();
		const settings = await listSettings();
		await prisma.database.update({
			where: { id },
			data: {
				settings: { upsert: { update: { isPublic, appendOnly }, create: { isPublic, appendOnly } } }
			}
		});
		const database = await prisma.database.findFirst({
			where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
			include: { destinationDocker: true, settings: true }
		});
		if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
		if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);

		// oldPublicPort is needed to tear down the proxy that is currently running.
		const { destinationDockerId, destinationDocker, publicPort: oldPublicPort } = database;
		const { privatePort } = generateDatabaseConfiguration(database);

		if (destinationDockerId) {
			if (isPublic) {
				await prisma.database.update({ where: { id }, data: { publicPort } });
				// Proxy flavor depends on the instance-wide Traefik setting.
				if (settings.isTraefikUsed) {
					await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
				} else {
					await startTcpProxy(destinationDocker, id, publicPort, privatePort);
				}
			} else {
				await prisma.database.update({ where: { id }, data: { publicPort: null } });
				await stopTcpHttpProxy(id, destinationDocker, oldPublicPort);
			}
		}
		return { publicPort }
	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
|
||||||
32
apps/api/src/routes/api/v1/databases/index.ts
Normal file
32
apps/api/src/routes/api/v1/databases/index.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import { FastifyPluginAsync } from 'fastify';
|
||||||
|
import { deleteDatabase, getDatabase, getDatabaseLogs, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
|
||||||
|
|
||||||
|
/**
 * Databases API plugin: registers all /databases routes and protects every
 * one of them with JWT verification.
 */
const root: FastifyPluginAsync = async (fastify, opts): Promise<void> => {
	// Reject unauthenticated requests before any handler runs.
	fastify.addHook('onRequest', async (request, reply) => {
		return await request.jwtVerify()
	})
	// Collection routes.
	fastify.get('/', async (request) => await listDatabases(request));
	fastify.post('/new', async (request, reply) => await newDatabase(request, reply));

	// Single-database CRUD.
	fastify.get('/:id', async (request) => await getDatabase(request));
	fastify.post('/:id', async (request, reply) => await saveDatabase(request, reply));
	fastify.delete('/:id', async (request) => await deleteDatabase(request));

	fastify.post('/:id/settings', async (request) => await saveDatabaseSettings(request));

	// Configuration: type, version, destination.
	fastify.get('/:id/configuration/type', async (request) => await getDatabaseTypes(request));
	fastify.post('/:id/configuration/type', async (request, reply) => await saveDatabaseType(request, reply));

	fastify.get('/:id/configuration/version', async (request) => await getVersions(request));
	fastify.post('/:id/configuration/version', async (request, reply) => await saveVersion(request, reply));

	fastify.post('/:id/configuration/destination', async (request, reply) => await saveDatabaseDestination(request, reply));

	// Observability.
	fastify.get('/:id/usage', async (request) => await getDatabaseUsage(request));
	fastify.get('/:id/logs', async (request) => await getDatabaseLogs(request));

	// Lifecycle.
	fastify.post('/:id/start', async (request) => await startDatabase(request));
	fastify.post('/:id/stop', async (request) => await stopDatabase(request));
};

export default root;
|
||||||
197
apps/api/src/routes/api/v1/destinations/handlers.ts
Normal file
197
apps/api/src/routes/api/v1/destinations/handlers.ts
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
import type { FastifyRequest } from 'fastify';
|
||||||
|
import { FastifyReply } from 'fastify';
|
||||||
|
import { asyncExecShell, errorHandler, listSettings, prisma, startCoolifyProxy, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
|
||||||
|
import { checkContainer, dockerInstance, getEngine } from '../../../../lib/docker';
|
||||||
|
|
||||||
|
export async function listDestinations(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const teamId = request.user.teamId;
|
||||||
|
let destinations = []
|
||||||
|
if (teamId === '0') {
|
||||||
|
destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
|
||||||
|
} else {
|
||||||
|
destinations = await prisma.destinationDocker.findMany({
|
||||||
|
where: { teams: { some: { id: teamId } } },
|
||||||
|
include: { teams: true }
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
destinations
|
||||||
|
}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function checkDestination(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { network } = request.body;
|
||||||
|
const found = await prisma.destinationDocker.findFirst({ where: { network } });
|
||||||
|
if (found) {
|
||||||
|
throw {
|
||||||
|
message: `Network already exists: ${network}`
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
 * Returns a destination (team-scoped) together with global settings and the
 * running state of the proxy container on that destination's engine.
 * The special id 'new' is allowed to pass through with a null destination.
 */
export async function getDestination(request: FastifyRequest) {
	try {
		const { id } = request.params
		const teamId = request.user?.teamId;
		const destination = await prisma.destinationDocker.findFirst({
			where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } }
		});
		if (!destination && id !== 'new') {
			throw { status: 404, message: `Destination not found.` };
		}
		const settings = await listSettings();
		let payload = {
			destination,
			settings,
			state: false
		};

		if (destination?.remoteEngine) {
			// Remote-engine state checking is not implemented yet; `state`
			// stays false for remote destinations.
			// const { stdout } = await asyncExecShell(
			// 	`ssh -p ${destination.port} ${destination.user}@${destination.ipAddress} "docker ps -a"`
			// );
			// console.log(stdout)
			// const engine = await generateRemoteEngine(destination);
			// // await saveSshKey(destination);
			// payload.state = await checkContainer(engine, 'coolify-haproxy');
		} else {
			const containerName = 'coolify-proxy';
			// `state` reports whether the proxy container is running on the
			// local engine (false when there is no engine, e.g. id === 'new').
			payload.state =
				destination?.engine && (await checkContainer(destination.engine, containerName));
		}
		return {
			...payload
		};

	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
|
||||||
|
/**
 * Creates a new Docker destination (id === 'new') or updates an existing one.
 * On create: ensures the Docker network exists, propagates the Coolify-proxy
 * flag across all destinations sharing the engine, and starts the proxy when
 * requested. Responds 201 with the new destination's id.
 */
export async function newDestination(request: FastifyRequest, reply: FastifyReply) {
	try {
		const { id } = request.params
		let { name, network, engine, isCoolifyProxyUsed } = request.body
		const teamId = request.user.teamId;
		if (id === 'new') {
			const host = getEngine(engine);
			const docker = dockerInstance({ destinationDocker: { engine, network } });
			// Anchored regex filter: only an exact network-name match counts.
			const found = await docker.engine.listNetworks({ filters: { name: [`^${network}$`] } });
			if (found.length === 0) {
				await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
			}
			await prisma.destinationDocker.create({
				data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
			});
			// Re-query to obtain the generated id of the record just created.
			const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
			const destination = destinations.find((destination) => destination.network === network);

			if (destinations.length > 0) {
				// The proxy flag is engine-wide: if any *other* destination on this
				// engine already uses the Coolify proxy, inherit that setting.
				const proxyConfigured = destinations.find(
					(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
				);
				if (proxyConfigured) {
					isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
				}
				await prisma.destinationDocker.updateMany({ where: { engine }, data: { isCoolifyProxyUsed } });
			}
			if (isCoolifyProxyUsed) {
				// Proxy flavor depends on the instance-wide Traefik setting.
				const settings = await prisma.setting.findFirst();
				if (settings?.isTraefikUsed) {
					await startTraefikProxy(engine);
				} else {
					await startCoolifyProxy(engine);
				}
			}
			return reply.code(201).send({ id: destination.id });
		} else {
			// Existing destination: only name/engine/network are editable here.
			await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
			return reply.code(201).send();
		}

	} catch ({ status, message }) {
		return errorHandler({ status, message })
	}
}
|
||||||
|
export async function deleteDestination(request: FastifyRequest) {
|
||||||
|
try {
|
||||||
|
const { id } = request.params
|
||||||
|
const destination = await prisma.destinationDocker.delete({ where: { id } });
|
||||||
|
if (destination.isCoolifyProxyUsed) {
|
||||||
|
const host = getEngine(destination.engine);
|
||||||
|
const { network } = destination;
|
||||||
|
const settings = await prisma.setting.findFirst();
|
||||||
|
const containerName = settings.isTraefikUsed ? 'coolify-proxy' : 'coolify-haproxy';
|
||||||
|
const { stdout: found } = await asyncExecShell(
|
||||||
|
`DOCKER_HOST=${host} docker ps -a --filter network=${network} --filter name=${containerName} --format '{{.}}'`
|
||||||
|
);
|
||||||
|
if (found) {
|
||||||
|
await asyncExecShell(
|
||||||
|
`DOCKER_HOST="${host}" docker network disconnect ${network} ${containerName}`
|
||||||
|
);
|
||||||
|
await asyncExecShell(`DOCKER_HOST="${host}" docker network rm ${network}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function saveDestinationSettings(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
try {
|
||||||
|
const { engine, isCoolifyProxyUsed } = request.body;
|
||||||
|
await prisma.destinationDocker.updateMany({
|
||||||
|
where: { engine },
|
||||||
|
data: { isCoolifyProxyUsed }
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 202
|
||||||
|
}
|
||||||
|
// return reply.code(201).send();
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function startProxy(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
const { engine } = request.body;
|
||||||
|
try {
|
||||||
|
await startTraefikProxy(engine);
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
await stopTraefikProxy(engine);
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function stopProxy(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
const settings = await prisma.setting.findFirst({});
|
||||||
|
const { engine } = request.body;
|
||||||
|
try {
|
||||||
|
await stopTraefikProxy(engine);
|
||||||
|
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export async function restartProxy(request: FastifyRequest, reply: FastifyReply) {
|
||||||
|
const settings = await prisma.setting.findFirst({});
|
||||||
|
const { engine } = request.body;
|
||||||
|
try {
|
||||||
|
await stopTraefikProxy(engine);
|
||||||
|
await startTraefikProxy(engine);
|
||||||
|
await prisma.destinationDocker.updateMany({
|
||||||
|
where: { engine },
|
||||||
|
data: { isCoolifyProxyUsed: true }
|
||||||
|
});
|
||||||
|
return {}
|
||||||
|
} catch ({ status, message }) {
|
||||||
|
return errorHandler({ status, message })
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user