Mirror of https://github.com/ershisan99/coolify.git (synced 2025-12-30 12:33:45 +00:00)

Compare commits (210 commits)
| SHA1 |
|---|
| cb1d86d08b |
| 88f3f628ef |
| 295bea37bc |
| bd7d756254 |
| 4261147fe8 |
| a70adc5eb3 |
| 06d40b8a81 |
| 2358510cba |
| e6d13cb7d7 |
| 39e21c3f36 |
| 8da900ee72 |
| 9f4e81a1a3 |
| 0b918c2f51 |
| 085cd2a314 |
| 98d2399568 |
| 515d9a0008 |
| aece1fa7d3 |
| abc614ecfd |
| 1180d3fdde |
| 1639d1725a |
| 5df1deecbc |
| fe3c0cf76e |
| cc0df0182c |
| eb354f639f |
| 02c530dcbe |
| 4ad7e1f8e6 |
| 2007ba0c3b |
| 2009dc11db |
| 62f2196a0c |
| e63c65da4f |
| 570a082227 |
| 9b1ede3a59 |
| c445fc0f8a |
| 699493cf24 |
| 6c89686f31 |
| f55b861849 |
| adf82c04ad |
| 1b80956fe8 |
| de9da8caf9 |
| 967f42dd89 |
| 95e8b29fa2 |
| 2e3c815e53 |
| 132707caa7 |
| 0dad616c38 |
| c0882dffde |
| 5e082c647c |
| 285c3c2f5d |
| dcb29a80fe |
| b45ad19732 |
| f12d453b5f |
| 8a00b711be |
| 56204efc7a |
| da638c270f |
| ad4b974274 |
| 943a05edcc |
| 1a28e65e50 |
| cd3af7fa39 |
| 8ccb0c88db |
| 127880cf8d |
| 2e56086661 |
| a129be0dbd |
| 12c0760cb3 |
| 9d3ed85ffd |
| 850d57d0d2 |
| 7981bec1ed |
| 76373a8597 |
| 9913e7b70b |
| a08bb25bfa |
| 28ec164bc2 |
| 3d5ea8629c |
| 4aaf59d034 |
| 14850476c7 |
| bf5b6170fa |
| 6f91591448 |
| 3c723bcba2 |
| e7dd13cffa |
| ad91630faa |
| 57f746b584 |
| a55720091c |
| b461635834 |
| 1375580651 |
| 3d20433ad1 |
| 58447c6456 |
| c6273e9177 |
| ffdc158d44 |
| 876c81fad8 |
| 028ee6d7b1 |
| ec00548f1b |
| c4dc03e4a8 |
| 3a510a77ec |
| 98a785fced |
| c48654160d |
| 55b80132c4 |
| 1f0c168936 |
| 6715bc750f |
| 04a48a626b |
| 2f9f0da7c6 |
| 513c4f9e29 |
| 3f078517a0 |
| 37036f0fca |
| 5789aadb5c |
| a768ed718a |
| 9c6092f31f |
| 40d294a247 |
| 72844e4edc |
| db0a71125a |
| da244af39d |
| 067f502d3c |
| fffc6b1e4e |
| 9121c6a078 |
| 9c4e581d8b |
| dfadd31f46 |
| 0cfa6fff43 |
| d61671c1a0 |
| d4f10a9af3 |
| 03861af893 |
| ae531c445d |
| 4b26aeef9a |
| 1e47b79b50 |
| 0c223dcec4 |
| 0f4536c3d3 |
| f43c584463 |
| 91c558ec83 |
| 9d45ab3246 |
| 34ff6eb567 |
| 8793c00438 |
| d7981d5c3e |
| bcaae3b67b |
| 046d9f9597 |
| 81bd0301d2 |
| 530e7e494f |
| d402fd5690 |
| eebec3b92f |
| 211c6585fa |
| e1b5c40ca0 |
| 747a9b521b |
| c2d72ad309 |
| 596181b622 |
| 77c5270e1e |
| a663c14df8 |
| 3bd9f00268 |
| 1aadda735d |
| 12035208e2 |
| df8a9f673c |
| aa5c8a2c56 |
| a84540e6bb |
| fb91b64063 |
| 94cc77ebca |
| aac6981304 |
| ca05828b68 |
| 8ec6b4c59c |
| f1be5f5341 |
| 714c264002 |
| eca58097ef |
| 281146e22b |
| f3a19a5d02 |
| 9b9b6937f4 |
| f54c0b7dff |
| 36c58ad286 |
| a67f633259 |
| f39a607c1a |
| 0cc67ed2e5 |
| 5f8402c645 |
| 3ab87cd11e |
| d5620d305d |
| 35ebc5e842 |
| 66276be1d2 |
| 47c0d522db |
| b654883d1a |
| b4f9d29129 |
| bec6b961f3 |
| 2ce8f34306 |
| 30d1ae59ec |
| ac7d4e3645 |
| 868c4001f6 |
| e99c44d967 |
| 48a877f160 |
| cea894a8bd |
| 087e7b9311 |
| 39ba498293 |
| fe7390bd4d |
| 75af551435 |
| ae2d3ebb48 |
| 5ff6c53715 |
| 3c94723b23 |
| c6a2e3e328 |
| 2dc5e10878 |
| 4086dfcf56 |
| 7937c2bab0 |
| 5ffa8e9936 |
| c431cee517 |
| 375f17e728 |
| d3f658c874 |
| 5e340a4cdd |
| 409a5b9f99 |
| fba305020b |
| bd4ce3ac45 |
| 733de60f7c |
| c365a44e01 |
| e94f450bf0 |
| d5efc9ddde |
| 68895ba4a5 |
| 139aa7a0fc |
| 4955157e13 |
| f2dd5cc75e |
| 2ad634dbc6 |
| de13f65a24 |
| e038865693 |
| dfd29dc37a |
| 4448b86b93 |
```diff
@@ -8,7 +8,6 @@ package
 .env.*
 !.env.example
 dist
-client
 apps/api/db/*.db
 local-serve
 apps/api/db/migration.db-journal
```
10 .github/ISSUE_TEMPLATE/--bug-report.yaml (vendored)
```diff
@@ -9,13 +9,21 @@ body:
   - type: markdown
     attributes:
       value: |
-        Thanks for taking the time to fill out this bug report! Please fill the form in English
+        Thanks for taking the time to fill out this bug report! Please fill the form in English.
   - type: checkboxes
     attributes:
       label: Is there an existing issue for this?
       options:
         - label: I have searched the existing issues
           required: true
+  - type: input
+    id: repository
+    attributes:
+      label: Example public repository
+      description: "An example public git repository to reproduce the issue easily (if applicable)."
+      placeholder: "ex: https://github.com/coollabsio/coolify"
+    validations:
+      required: false
   - type: textarea
     attributes:
       label: Description
```
93 .github/workflows/fluent-bit-release.yml (vendored, new file)
```diff
@@ -0,0 +1,93 @@
+name: fluent-bit-release
+
+on:
+  push:
+    paths:
+      - "others/fluentbit"
+      - ".github/workflows/fluent-bit-release.yml"
+    branches:
+      - next
+
+jobs:
+  arm64:
+    runs-on: [self-hosted, arm64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: others/fluentbit/
+          platforms: linux/arm64
+          push: true
+          tags: coollabsio/coolify-fluent-bit:1.0.0-arm64
+  amd64:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: others/fluentbit/
+          platforms: linux/amd64
+          push: true
+          tags: coollabsio/coolify-fluent-bit:1.0.0-amd64
+  aarch64:
+    runs-on: [self-hosted, arm64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: others/fluentbit/
+          platforms: linux/aarch64
+          push: true
+          tags: coollabsio/coolify-fluent-bit:1.0.0-aarch64
+  merge-manifest:
+    runs-on: ubuntu-latest
+    needs: [amd64, arm64, aarch64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Create & publish manifest
+        run: |
+          docker manifest create coollabsio/coolify-fluent-bit:1.0.0 --amend coollabsio/coolify-fluent-bit:1.0.0-amd64 --amend coollabsio/coolify-fluent-bit:1.0.0-arm64 --amend coollabsio/coolify-fluent-bit:1.0.0-aarch64
+          docker manifest push coollabsio/coolify-fluent-bit:1.0.0
```
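The three architecture jobs each push a single-arch tag; the merge-manifest job then stitches those tags into one multi-arch manifest. As a minimal sketch (not part of the diff, using the image name from the workflow above), this is how the published tag behaves on the consuming side:

```sh
# List the per-platform entries behind the merged tag
# (amd64, arm64 and aarch64, as created by the merge-manifest job).
docker manifest inspect coollabsio/coolify-fluent-bit:1.0.0

# A plain pull resolves the manifest list to the entry
# matching the local platform automatically.
docker pull coollabsio/coolify-fluent-bit:1.0.0
```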
93 .github/workflows/pocketbase-release.yml (vendored, new file)
```diff
@@ -0,0 +1,93 @@
+name: pocketbase-release
+
+on:
+  push:
+    paths:
+      - "others/pocketbase"
+      - ".github/workflows/pocketbase-release.yml"
+    branches:
+      - next
+
+jobs:
+  arm64:
+    runs-on: [self-hosted, arm64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: others/pocketbase/
+          platforms: linux/arm64
+          push: true
+          tags: coollabsio/pocketbase:0.8.0-arm64
+  amd64:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: others/pocketbase/
+          platforms: linux/amd64
+          push: true
+          tags: coollabsio/pocketbase:0.8.0-amd64
+  aarch64:
+    runs-on: [self-hosted, arm64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: others/pocketbase/
+          platforms: linux/aarch64
+          push: true
+          tags: coollabsio/pocketbase:0.8.0-aarch64
+  merge-manifest:
+    runs-on: ubuntu-latest
+    needs: [amd64, arm64, aarch64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Create & publish manifest
+        run: |
+          docker manifest create coollabsio/pocketbase:0.8.0 --amend coollabsio/pocketbase:0.8.0-amd64 --amend coollabsio/pocketbase:0.8.0-arm64 --amend coollabsio/pocketbase:0.8.0-aarch64
+          docker manifest push coollabsio/pocketbase:0.8.0
```
6 .github/workflows/staging-release.yml (vendored)
```diff
@@ -2,6 +2,12 @@ name: staging-release
 
 on:
   push:
+    paths:
+      - "**"
+      - "!others/fluentbit"
+      - "!others/pocketbase"
+      - "!.github/workflows/fluent-bit-release.yml"
+      - "!.github/workflows/pocketbase-release.yml"
    branches:
      - next
 
```
10 .gitignore (vendored)
```diff
@@ -8,10 +8,16 @@ package
 .env.*
 !.env.example
 dist
-client
 apps/api/db/*.db
-local-serve
 apps/api/db/migration.db-journal
 apps/api/core*
+apps/backup/backups/*
+!apps/backup/backups/.gitkeep
 logs
 others/certificates
+backups/*
+!backups/.gitkeep
+
+# Trpc
+apps/server/db/*.db
+apps/server/db/*.db-journal
```
19 .vscode/settings.json (vendored)
```diff
@@ -1,11 +1,22 @@
 {
-  "i18n-ally.localesPaths": ["src/lib/locales"],
+  "i18n-ally.localesPaths": [
+    "src/lib/locales"
+  ],
   "i18n-ally.keystyle": "nested",
   "i18n-ally.extract.ignoredByFiles": {
-    "src\\routes\\__layout.svelte": ["Coolify", "coolLabs logo"]
+    "src\\routes\\__layout.svelte": [
+      "Coolify",
+      "coolLabs logo"
+    ]
   },
   "i18n-ally.sourceLanguage": "en",
-  "i18n-ally.enabledFrameworks": ["svelte"],
-  "i18n-ally.enabledParsers": ["js", "ts", "json"],
+  "i18n-ally.enabledFrameworks": [
+    "svelte"
+  ],
+  "i18n-ally.enabledParsers": [
+    "js",
+    "ts",
+    "json"
+  ],
   "i18n-ally.extract.autoDetect": true
 }
```
````diff
@@ -34,7 +34,7 @@ You'll need a set of skills to [get started](docs/contribution/GettingStarted.md
 
 ```sh
 # Or... Copy and paste commands bellow:
-cp apps/api/.env.example apps/api.env
+cp apps/api/.env.example apps/api/.env
 pnpm install
 pnpm db:push
 pnpm db:seed
````
33 README.md
```diff
@@ -77,6 +77,7 @@ Deploy your resource to:
 <a href="https://redis.io"><svg style="width:40px;height:40px" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ><defs ><path id="a" d="m45.536 38.764c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.813s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" /><path id="b" d="m45.536 28.733c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.935c2.332-.837 3.14-.867 5.126-.14s12.35 4.853 14.312 5.57 2.037 1.31.024 2.36z" /></defs ><g transform="matrix(.848327 0 0 .848327 -7.883573 -9.449691)" ><use fill="#a41e11" xlink:href="#a" /><path d="m45.536 34.95c-2.013 1.05-12.44 5.337-14.66 6.494s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276-2.04-1.613-.077-2.382l15.332-5.936c2.332-.836 3.14-.867 5.126-.14s12.35 4.852 14.31 5.582 2.037 1.31.024 2.36z" fill="#d82c20" /><use fill="#a41e11" xlink:href="#a" y="-6.218" /><use fill="#d82c20" xlink:href="#b" /><path d="m45.536 26.098c-2.013 1.05-12.44 5.337-14.66 6.495s-3.453 1.146-5.207.308-12.85-5.32-14.85-6.276c-1-.478-1.524-.88-1.524-1.26v-3.815s14.447-3.145 16.78-3.982 3.14-.867 5.126-.14 13.853 2.868 15.814 3.587v3.76c0 .377-.452.8-1.477 1.324z" fill="#a41e11" /><use fill="#d82c20" xlink:href="#b" y="-6.449" /><g fill="#fff" ><path d="m29.096 20.712-1.182-1.965-3.774-.34 2.816-1.016-.845-1.56 2.636 1.03 2.486-.814-.672 1.612 2.534.95-3.268.34zm-6.296 3.912 8.74-1.342-2.64 3.872z" /><ellipse cx="20.444" cy="21.402" rx="4.672" ry="1.811" /></g ><path d="m42.132 21.138-5.17 2.042-.004-4.087z" fill="#7a0c00" /><path d="m36.963 23.18-.56.22-5.166-2.042 5.723-2.264z" fill="#ad2115" /></g ></svg ></a>
 
 ### Services
 
 - [Appwrite](https://appwrite.io)
 - [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
 - [Ghost](https://ghost.org)
@@ -93,23 +94,39 @@ Deploy your resource to:
 - [Fider](https://fider.io)
 - [Hasura](https://hasura.io)
 - [GlitchTip](https://glitchtip.com)
+- And more...
 
-## Migration from v1
-
-A fresh installation is necessary. v2 and v3 are not compatible with v1.
-
 ## Support
 
-- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
+- Mastodon: [@andrasbacsai@fosstodon.org](https://fosstodon.org/@andrasbacsai)
 - Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
+- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
 - Email: [andras@coollabs.io](mailto:andras@coollabs.io)
 - Discord: [Invitation](https://coollabs.io/discord)
 
-## Development Contributions
+---
 
-Coolify is developed under the Apache License and you can help to make it grow → [Start coding!](./CONTRIBUTION.md)
+## ⚗️ Expertise Contributions
 
-## Financial Contributors
+Coolify is developed under the [Apache License](./LICENSE) and you can help to make it grow.
+Our community will be glad to have you on board!
+
+Learn how to contribute to Coolify as as ...
+
+→ [👩🏾‍💻 Software developer](./CONTRIBUTION.md)
+
+→ [🧑🏻‍🏫 Translator](./docs/contribution/Translating.md)
+
+<!--
+→ 🧑🏽‍🎨 Designer
+→ 🙋‍♀️ Community Managemer
+→ 🧙🏻‍♂️ Text Content Creator
+→ 👨🏼‍🎤 Video Content Creator
+-->
+
+---
+
+## 💰 Financial Contributors
 
 Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/coollabsio/contribute)]
 
```
```diff
@@ -2,7 +2,6 @@ COOLIFY_APP_ID=local-dev
 # 32 bits long secret key
 COOLIFY_SECRET_KEY=12341234123412341234123412341234
 COOLIFY_DATABASE_URL=file:../db/dev.db
-COOLIFY_SENTRY_DSN=
 
 COOLIFY_IS_ON=docker
 COOLIFY_WHITE_LABELED=false
```
BIN apps/api/db/dev.db.bak (new file): binary file not shown.

File diff suppressed because one or more lines are too long
```diff
@@ -1,5 +1,381 @@
 - templateVersion: 1.0.0
-  defaultVersion: v1.8.6
+  defaultVersion: "0.8.0"
+  documentation: https://pocketbase.io/docs/
+  type: pocketbase
+  name: Pocketbase
+  description: "Open Source realtime backend in 1 file"
+  services:
+    $$id:
+      image: coollabsio/pocketbase:$$core_version
+      volumes:
+        - $$id-data:/app/pb_data
+      ports:
+        - "8080"
+- templateVersion: 1.0.0
+  defaultVersion: v1.5.1
+  documentation: https://plausible.io/doc/
+  type: plausibleanalytics-arm
+  name: Plausible Analytics (ARM)
+  description: A lightweight and open-source website analytics tool.
+  labels:
+    - analytics
+    - statistics
+    - plausible
+    - gdpr
+    - no-cookie
+    - google analytics
+  services:
+    $$id:
+      name: Plausible Analytics
+      command: >-
+        sh -c "sleep 10 && /entrypoint.sh db createdb && /entrypoint.sh db migrate
+        && /entrypoint.sh db init-admin && /entrypoint.sh run"
+      depends_on:
+        - $$id-postgresql
+        - $$id-clickhouse
+      image: plausible/analytics:$$core_version
+      environment:
+        - ADMIN_USER_EMAIL=$$config_admin_user_email
+        - ADMIN_USER_NAME=$$config_admin_user_name
+        - ADMIN_USER_PWD=$$secret_admin_user_pwd
+        - BASE_URL=$$config_base_url
+        - SECRET_KEY_BASE=$$secret_secret_key_base
+        - DISABLE_AUTH=$$config_disable_auth
+        - DISABLE_REGISTRATION=$$config_disable_registration
+        - DATABASE_URL=$$secret_database_url
+        - CLICKHOUSE_DATABASE_URL=$$secret_clickhouse_database_url
+      ports:
+        - "8000"
+    $$id-postgresql:
+      name: PostgreSQL
+      image: postgres:14-alpine
+      volumes:
+        - $$id-postgresql-data:/var/lib/postgresql/data
+      environment:
+        - POSTGRES_PASSWORD=$$secret_postgres_password
+        - POSTGRES_USER=$$config_postgres_user
+        - POSTGRES_DB=$$config_postgres_db
+    $$id-clickhouse:
+      name: Clickhouse
+      volumes:
+        - $$id-clickhouse-data:/var/lib/clickhouse
+      image: clickhouse/clickhouse-server:22.6-alpine
+      ulimits:
+        nofile:
+          soft: 262144
+          hard: 262144
+      files:
+        - location: /etc/clickhouse-server/users.d/logging.xml
+          content: >-
+            <yandex><logger><level>warning</level><console>true</console></logger><query_thread_log
+            remove="remove"/><query_log remove="remove"/><text_log
+            remove="remove"/><trace_log remove="remove"/><metric_log
+            remove="remove"/><asynchronous_metric_log
+            remove="remove"/><session_log remove="remove"/><part_log
+            remove="remove"/></yandex>
+        - location: /etc/clickhouse-server/config.d/logging.xml
+          content: >-
+            <yandex><profiles><default><log_queries>0</log_queries><log_query_threads>0</log_query_threads></default></profiles></yandex>
+        - location: /docker-entrypoint-initdb.d/init.query
+          content: CREATE DATABASE IF NOT EXISTS plausible;
+        - location: /docker-entrypoint-initdb.d/init-db.sh
+          content: >-
+            clickhouse client --queries-file
+            /docker-entrypoint-initdb.d/init.query
+  variables:
+    - id: $$config_base_url
+      name: BASE_URL
+      label: Base URL
+      defaultValue: $$generate_fqdn
+      description: >-
+        You must set this to the FQDN of the Plausible Analytics instance. This is
+        used to generate the links to the Plausible Analytics instance.
+    - id: $$secret_database_url
+      name: DATABASE_URL
+      label: Database URL for PostgreSQL
+      defaultValue: >-
+        postgresql://$$config_postgres_user:$$secret_postgres_password@$$id-postgresql:5432/$$config_postgres_db
+      description: ""
+    - id: $$secret_clickhouse_database_url
+      name: CLICKHOUSE_DATABASE_URL
+      label: Database URL for Clickhouse
+      defaultValue: http://$$id-clickhouse:8123/plausible
+      description: ""
+    - id: $$config_admin_user_email
+      name: ADMIN_USER_EMAIL
+      label: Admin Email Address
+      defaultValue: admin@example.com
+      description: This is the admin email. Please change it.
+    - id: $$config_admin_user_name
+      name: ADMIN_USER_NAME
+      label: Admin User Name
+      defaultValue: $$generate_username
+      description: This is the admin username. Please change it.
+    - id: $$secret_admin_user_pwd
+      name: ADMIN_USER_PWD
+      label: Admin User Password
+      defaultValue: $$generate_password
+      description: This is the admin password. Please change it.
+      showOnConfiguration: true
+    - id: $$secret_secret_key_base
+      name: SECRET_KEY_BASE
+      label: Secret Key Base
+      defaultValue: $$generate_hex(64)
+      description: ""
+    - id: $$config_disable_auth
+      name: DISABLE_AUTH
+      label: Disable Authentication
+      defaultValue: "false"
+      description: ""
+    - id: $$config_disable_registration
+      name: DISABLE_REGISTRATION
+      label: Disable Registration
+      defaultValue: "true"
+      description: ""
+    - id: $$config_postgres_user
+      main: $$id-postgresql
+      name: POSTGRES_USER
+      label: PostgreSQL Username
+      defaultValue: postgresql
+      description: ""
+    - id: $$secret_postgres_password
+      main: $$id-postgresql
+      name: POSTGRES_PASSWORD
+      label: PostgreSQL Password
+      defaultValue: $$generate_password
+      description: ""
+      showOnConfiguration: true
+    - id: $$config_postgres_db
+      main: $$id-postgresql
+      name: POSTGRES_DB
+      label: PostgreSQL Database
+      defaultValue: plausible
+      description: ""
+    - id: $$config_scriptName
+      name: SCRIPT_NAME
+      label: Custom Script Name
+      defaultValue: plausible.js
+      description: This is the default script name.
+- templateVersion: 1.0.0
+  defaultVersion: "1.17"
+  documentation: https://docs.gitea.io
+  type: gitea
+  name: Gitea
+  description: Gitea is a community managed lightweight code hosting solution written in Go.
+  labels:
+    - storage
+    - git
+  services:
+    $$id:
+      name: Gitea
+      documentation: https://docs.gitea.io
+      image: gitea/gitea:$$core_version
+      volumes:
+        - $$id-data:/data
+        - /etc/timezone:/etc/timezone:ro
+        - /etc/localtime:/etc/localtime:ro
+      environment:
+        - USER_UID=1000
+        - USER_GID=1000
+        - DOMAIN=$$config_domain
+        - SSH_DOMAIN=$$config_ssh_domain
+        - ROOT_URL=$$config_root_url
+        - SECRET_KEY=$$secret_secret_key
+        - INTERNAL_TOKEN=$$secret_internal_token
+        - SSH_PORT=22
+        - START_SSH_SERVER=$$config_start_ssh_server
+      ports:
+        - "3000"
+        - "22"
+      proxy:
+        - port: "22"
+          hostPort: $$config_hostport_ssh
+  variables:
+    - id: $$config_hostport_ssh
+      name: SSH_PORT
+      label: SSH Port
+      defaultValue: "8022"
+      description: ""
+      required: true
+    - id: $$config_domain
+      name: DOMAIN
+      label: Domain
+      defaultValue: $$generate_domain
+      description: ""
+    - id: $$config_ssh_domain
+      name: SSH_DOMAIN
+      label: SSH Domain
+      defaultValue: $$generate_domain
+      description: ""
+    - id: $$config_start_ssh_server
+      name: START_SSH_SERVER
+      label: Start SSH Server
+      defaultValue: "true"
+      description: ""
+    - id: $$config_root_url
+      name: ROOT_URL
+      label: Root URL of Gitea
+      defaultValue: $$generate_fqdn_slash
+      description: ""
+    - id: $$secret_secret_key
+      name: SECRET_KEY
+      label: Secret Key
+      defaultValue: $$generate_hex(32)
+      description: ""
+    - id: $$secret_internal_token
+      name: INTERNAL_TOKEN
+      label: Internal JWT Token
+      defaultValue: $$generate_token
+      description: ""
+- templateVersion: 1.0.0
+  defaultVersion: "20.0"
+  documentation: https://www.keycloak.org/documentation
+  type: keycloak
+  name: Keycloak
+  description: "Keycloak provides user federation, strong authentication, user management, fine-grained authorization, and more."
+  labels:
+    - authentication
+    - authorization
+    - oidconnect
+    - saml2
+  services:
+    $$id:
+      name: Keycloak
+      command: start --db=postgres --features=token-exchange --import-realm
+      depends_on:
+        - $$id-postgresql
+      image: "quay.io/keycloak/keycloak:$$core_version"
+      volumes:
+        - $$id-import:/opt/keycloak/data/import
+      environment:
+        - KC_HEALTH_ENABLED=true
+        - KC_PROXY=edge
+        - KC_DB=postgres
+        - KC_HOSTNAME=$$config_keycloak_domain
+        - KEYCLOAK_ADMIN=$$config_admin_user
+        - KEYCLOAK_ADMIN_PASSWORD=$$secret_keycloak_admin_password
+        - KC_DB_PASSWORD=$$secret_postgres_password
+        - KC_DB_USERNAME=$$config_postgres_user
+        - KC_DB_URL=$$secret_keycloak_database_url
+      ports:
+        - "8080"
+    $$id-postgresql:
+      name: PostgreSQL
+      depends_on: []
+      image: "postgres:14-alpine"
+      volumes:
+        - "$$id-postgresql-data:/var/lib/postgresql/data"
+      environment:
+        - POSTGRES_USER=$$config_postgres_user
+        - POSTGRES_PASSWORD=$$secret_postgres_password
+        - POSTGRES_DB=$$config_postgres_db
+      ports: []
+  variables:
+    - id: $$config_keycloak_domain
+      name: KEYCLOAK_DOMAIN
+      label: Keycloak Domain
+      defaultValue: $$generate_domain
+      description: ""
+    - id: $$secret_keycloak_database_url
+      name: KEYCLOAK_DATABASE_URL
+      label: Keycloak Database Url
+      defaultValue: >-
+        jdbc:postgresql://$$id-postgresql:5432/$$config_postgres_db
+      description: ""
+    - id: $$config_admin_user
+      name: KEYCLOAK_ADMIN
+      label: Keycloak Admin User
+      defaultValue: $$generate_username
+      description: ""
+    - id: $$secret_keycloak_admin_password
+      name: KEYCLOAK_ADMIN_PASSWORD
+      label: Keycloak Admin Password
+      defaultValue: $$generate_password
+      description: ""
+      showOnConfiguration: true
+    - id: $$config_postgres_user
+      main: $$id-postgresql
+      name: POSTGRES_USER
+      label: PostgreSQL User
+      defaultValue: $$generate_username
+      description: ""
+    - id: $$secret_postgres_password
+      main: $$id-postgresql
+      name: POSTGRES_PASSWORD
+      label: PostgreSQL Password
+      defaultValue: $$generate_password
+      description: ""
+      showOnConfiguration: true
+    - id: $$config_postgres_db
+      main: $$id-postgresql
+      name: POSTGRES_DB
+      label: PostgreSQL Database
+      defaultValue: keycloak
+      description: ""
+- templateVersion: 1.0.0
+  defaultVersion: v3.7
+  documentation: https://github.com/freyacodes/Lavalink
+  description: Standalone audio sending node based on Lavaplayer.
+  type: lavalink
+  name: Lavalink
+  labels:
+    - discord
+    - discord bot
+    - audio
+    - lavalink
+    - jda
+  services:
+    $$id:
+      name: Lavalink
+      image: fredboat/lavalink:$$core_version
+      environment: []
+      volumes:
+        - $$id-lavalink:/lavalink
+      ports:
+        - "2333"
+      files:
+        - location: /opt/Lavalink/application.yml
+          content: >-
+            server:
+              port: $$config_port
+              address: 0.0.0.0
+            lavalink:
+              server:
+                password: "$$secret_password"
+                sources:
+                  youtube: true
+                  bandcamp: true
+                  soundcloud: true
+                  twitch: true
+                  vimeo: true
+                  http: true
+                  local: false
+
+            logging:
+              file:
+                path: ./logs/
+
+              level:
+                root: INFO
+                lavalink: INFO
+
+              logback:
+                rollingpolicy:
+                  max-file-size: 1GB
+                  max-history: 30
+  variables:
+    - id: $$config_port
+      name: PORT
+      label: Port
+      defaultValue: "2333"
+      required: true
+    - id: $$secret_password
+      name: PASSWORD
+      label: Password
+      defaultValue: $$generate_password
+      required: true
+- templateVersion: 1.0.0
+  defaultVersion: v1.8.9
   documentation: https://docs.appsmith.com/getting-started/setup/instance-configuration/
   type: appsmith
   name: Appsmith
@@ -32,7 +408,7 @@
       defaultValue: "true"
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: 0.56.2
+  defaultVersion: 0.57.4
   documentation: https://hub.docker.com/r/zadam/trilium
   description: "A hierarchical note taking application with focus on building large personal knowledge bases."
   labels:
@@ -52,7 +428,7 @@
       - "8080"
   variables: []
 - templateVersion: 1.0.0
-  defaultVersion: 1.9.2
+  defaultVersion: 1.18.5
   documentation: https://hub.docker.com/r/louislam/uptime-kuma
   description: A free & fancy self-hosted monitoring tool.
   labels:
@@ -102,7 +478,7 @@
       - "80"
   variables: []
 - templateVersion: 1.0.0
-  defaultVersion: 9.2.3
+  defaultVersion: 9.3.1
   documentation: https://hub.docker.com/r/grafana/grafana
   type: grafana
   name: Grafana
@@ -123,7 +499,7 @@
       - "3000"
   variables: []
 - templateVersion: 1.0.0
-  defaultVersion: 1.0.3
+  defaultVersion: 1.1.2
   documentation: https://appwrite.io/docs
   type: appwrite
   name: Appwrite
@@ -270,6 +646,7 @@
       - "$$id-functions:/storage/functions"
      - "$$id-builds:/storage/builds"
      - "/var/run/docker.sock:/var/run/docker.sock"
+      - "/tmp:/tmp:rw"
     entrypoint: executor
   "$$id-influxdb":
     image: appwrite/influxdb:1.5.0
@@ -1292,7 +1669,7 @@
       defaultValue: weblate
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: 2022.10.14-1a5b0965
+  defaultVersion: 2022.12.12-966e9c3c
   documentation: https://docs.searxng.org/
   type: searxng
   name: SearXNG
@@ -1365,7 +1742,7 @@
       defaultValue: $$generate_password
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: v2.0.6
+  defaultVersion: v3.0.0
   documentation: https://glitchtip.com/documentation
   type: glitchtip
   name: GlitchTip
@@ -1587,7 +1964,7 @@
       defaultValue: glitchtip
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: v2.13.0
+  defaultVersion: v2.16.0
   documentation: https://hasura.io/docs/latest/index/
   type: hasura
   name: Hasura
@@ -1606,7 +1983,7 @@
       - HASURA_GRAPHQL_ENABLE_CONSOLE=$$config_hasura_graphql_enable_console
       - >-
         HASURA_GRAPHQL_METADATA_DATABASE_URL=$$secret_hasura_graphql_metadata_database_url
-      - HASURA_GRAPHQL_ADMIN_PASSWORD=$$secret_hasura_graphql_admin_password
+      - HASURA_GRAPHQL_ADMIN_SECRET=$$secret_hasura_graphql_admin_secret
     ports:
       - "8080"
   $$id-postgresql:
@@ -1632,8 +2009,8 @@
       defaultValue: >-
         postgresql://$$config_postgres_user:$$secret_postgres_password@$$id-postgresql:5432/$$config_postgres_db
       description: ""
-    - id: $$secret_hasura_graphql_admin_password
-      name: HASURA_GRAPHQL_ADMIN_PASSWORD
+    - id: $$secret_hasura_graphql_admin_secret
+      name: HASURA_GRAPHQL_ADMIN_SECRET
       label: Hasura Admin Password
       defaultValue: $$generate_password
       description: ""
@@ -1654,7 +2031,7 @@
       defaultValue: hasura
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: postgresql-v1.38.0
+  defaultVersion: postgresql-v1.39.5
   documentation: https://umami.is/docs/getting-started
   type: umami-postgresql
   name: Umami
@@ -1664,7 +2041,6 @@
   services:
     $$id:
       name: Umami
-      documentation: "Official docs are [here](https://umami.is/docs/getting-started)"
       depends_on:
         - $$id-postgresql
       image: "ghcr.io/umami-software/umami:$$core_version"
@@ -1678,7 +2054,6 @@
       - "3000"
     $$id-postgresql:
       name: PostgreSQL
-      documentation: "Official docs are [here](https://umami.is/docs/getting-started)"
       depends_on: []
       image: "postgres:12-alpine"
       volumes:
@@ -1862,7 +2237,214 @@
       description: ""
       showOnConfiguration: true
 - templateVersion: 1.0.0
-  defaultVersion: v0.29.1
+  ignore: true
+  defaultVersion: postgresql-v1.39.5
+  documentation: https://umami.is/docs/getting-started
+  type: umami
+  name: Umami
+  subname: (PostgreSQL)
+  description: >-
+    A simple, easy to use, self-hosted web analytics solution.
+  services:
+    $$id:
+      name: Umami
+      depends_on:
+        - $$id-postgresql
+      image: "ghcr.io/umami-software/umami:$$core_version"
+      volumes: []
+      environment:
+        - ADMIN_PASSWORD=$$secret_admin_password
+        - DATABASE_URL=$$secret_database_url
+        - DATABASE_TYPE=$$config_database_type
+        - HASH_SALT=$$secret_hash_salt
+      ports:
+        - "3000"
+    $$id-postgresql:
+      name: PostgreSQL
+      depends_on: []
+      image: "postgres:12-alpine"
+      volumes:
+        - "$$id-postgresql-data:/var/lib/postgresql/data"
+      environment:
+        - POSTGRES_USER=$$config_postgres_user
+        - POSTGRES_PASSWORD=$$secret_postgres_password
+        - POSTGRES_DB=$$config_postgres_db
+      ports: []
+      files:
+        - location: /docker-entrypoint-initdb.d/schema.postgresql.sql
+          content: |2-
+
+            -- CreateTable
+            CREATE TABLE "account" (
+                "user_id" SERIAL NOT NULL,
+                "username" VARCHAR(255) NOT NULL,
+                "password" VARCHAR(60) NOT NULL,
+                "is_admin" BOOLEAN NOT NULL DEFAULT false,
+                "created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+                "updated_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+
+                PRIMARY KEY ("user_id")
+            );
+
+            -- CreateTable
+            CREATE TABLE "event" (
+                "event_id" SERIAL NOT NULL,
+                "website_id" INTEGER NOT NULL,
+                "session_id" INTEGER NOT NULL,
+                "created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+                "url" VARCHAR(500) NOT NULL,
+                "event_type" VARCHAR(50) NOT NULL,
+                "event_value" VARCHAR(50) NOT NULL,
+
+                PRIMARY KEY ("event_id")
+            );
+
+            -- CreateTable
+            CREATE TABLE "pageview" (
+                "view_id" SERIAL NOT NULL,
+                "website_id" INTEGER NOT NULL,
+                "session_id" INTEGER NOT NULL,
+                "created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+                "url" VARCHAR(500) NOT NULL,
+                "referrer" VARCHAR(500),
+
+                PRIMARY KEY ("view_id")
+            );
+
+            -- CreateTable
+            CREATE TABLE "session" (
+                "session_id" SERIAL NOT NULL,
+                "session_uuid" UUID NOT NULL,
+                "website_id" INTEGER NOT NULL,
+                "created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+                "hostname" VARCHAR(100),
+                "browser" VARCHAR(20),
+                "os" VARCHAR(20),
+                "device" VARCHAR(20),
+                "screen" VARCHAR(11),
+                "language" VARCHAR(35),
+                "country" CHAR(2),
+
+                PRIMARY KEY ("session_id")
+            );
+
+            -- CreateTable
+            CREATE TABLE "website" (
+                "website_id" SERIAL NOT NULL,
+                "website_uuid" UUID NOT NULL,
+                "user_id" INTEGER NOT NULL,
+                "name" VARCHAR(100) NOT NULL,
+                "domain" VARCHAR(500),
+                "share_id" VARCHAR(64),
+                "created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
+
+                PRIMARY KEY ("website_id")
+            );
+
+            -- CreateIndex
+            CREATE UNIQUE INDEX "account.username_unique" ON "account"("username");
+
+            -- CreateIndex
+            CREATE INDEX "event_created_at_idx" ON "event"("created_at");
+
+            -- CreateIndex
+            CREATE INDEX "event_session_id_idx" ON "event"("session_id");
+
+            -- CreateIndex
+            CREATE INDEX "event_website_id_idx" ON "event"("website_id");
+
+            -- CreateIndex
+            CREATE INDEX "pageview_created_at_idx" ON "pageview"("created_at");
+
+            -- CreateIndex
+            CREATE INDEX "pageview_session_id_idx" ON "pageview"("session_id");
+
+            -- CreateIndex
+            CREATE INDEX "pageview_website_id_created_at_idx" ON "pageview"("website_id", "created_at");
+
+            -- CreateIndex
+            CREATE INDEX "pageview_website_id_idx" ON "pageview"("website_id");
+
+            -- CreateIndex
+            CREATE INDEX "pageview_website_id_session_id_created_at_idx" ON "pageview"("website_id", "session_id", "created_at");
+
+            -- CreateIndex
+            CREATE UNIQUE INDEX "session.session_uuid_unique" ON "session"("session_uuid");
+
+            -- CreateIndex
+            CREATE INDEX "session_created_at_idx" ON "session"("created_at");
+
+            -- CreateIndex
+            CREATE INDEX "session_website_id_idx" ON "session"("website_id");
+
+            -- CreateIndex
+            CREATE UNIQUE INDEX "website.website_uuid_unique" ON "website"("website_uuid");
+
+            -- CreateIndex
+            CREATE UNIQUE INDEX "website.share_id_unique" ON "website"("share_id");
+
+            -- CreateIndex
+            CREATE INDEX "website_user_id_idx" ON "website"("user_id");
+
+            -- AddForeignKey
+            ALTER TABLE "event" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+            -- AddForeignKey
+            ALTER TABLE "event" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+            -- AddForeignKey
+            ALTER TABLE "pageview" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+            -- AddForeignKey
+            ALTER TABLE "pageview" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+            -- AddForeignKey
+            ALTER TABLE "session" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+            -- AddForeignKey
+            ALTER TABLE "website" ADD FOREIGN KEY ("user_id") REFERENCES "account"("user_id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+            insert into account (username, password, is_admin) values ('admin', '$$hashed$$secret_admin_password', true);
+  variables:
+    - id: $$secret_database_url
+      name: DATABASE_URL
+      label: Database URL for PostgreSQL
+      defaultValue: >-
+        postgresql://$$config_postgres_user:$$secret_postgres_password@$$id-postgresql:5432/$$config_postgres_db
+      description: ""
+    - id: $$secret_hash_salt
+      name: HASH_SALT
+      label: Hash Salt
+      defaultValue: $$generate_hex(64)
+      description: ""
+    - id: $$config_database_type
+      name: DATABASE_TYPE
+      label: Database Type
+      defaultValue: "postgresql"
+      description: ""
+    - id: $$config_postgres_user
+      name: POSTGRES_USER
+      label: PostgreSQL User
+      defaultValue: $$generate_username
+      description: ""
+    - id: $$secret_postgres_password
+      name: POSTGRES_PASSWORD
+      label: PostgreSQL Password
+      defaultValue: $$generate_password
+      description: ""
+    - id: $$config_postgres_db
+      name: POSTGRES_DB
+      label: PostgreSQL Database
+      defaultValue: umami
+      description: ""
+    - id: $$secret_admin_password
+      name: ADMIN_PASSWORD
+      label: Initial Admin Password
+      defaultValue: $$generate_password
+      description: ""
+      showOnConfiguration: true
+- templateVersion: 1.0.0
+  defaultVersion: v0.30.1
   documentation: https://docs.meilisearch.com/learn/getting_started/quick_start.html
   type: meilisearch
   name: MeiliSearch
@@ -1871,7 +2453,7 @@
   services:
     $$id:
       name: MeiliSearch
-      documentation: "https://docs.meilisearch.com/"
+      documentation: https://docs.meilisearch.com/
       depends_on: []
       image: "getmeili/meilisearch:$$core_version"
       volumes:
@@ -1893,7 +2475,8 @@
 - templateVersion: 1.0.0
   ignore: true
   defaultVersion: latest
-  documentation: https://ghost.org/resources/
+  documentation: https://docs.ghost.org
+  arch: amd64
   type: ghost-mariadb
   name: Ghost
   subname: (MariaDB)
@@ -1905,7 +2488,6 @@
   services:
     $$id:
       name: Ghost
-      documentation: "Taken from https://docs.ghost.org/"
       depends_on:
         - $$id-mariadb
       image: "bitnami/ghost:$$core_version"
@@ -2010,8 +2592,8 @@
       defaultValue: $$generate_password
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: "5.22"
-  documentation: https://ghost.org/resources/
+  defaultVersion: "5.25.3"
+  documentation: https://docs.ghost.org
   type: ghost-only
   name: Ghost
   subname: (without Database)
@@ -2020,7 +2602,6 @@
   services:
     $$id:
       name: Ghost
-      documentation: "Taken from https://docs.ghost.org/"
       image: "ghost:$$core_version"
       volumes:
         - "$$id-ghost:/var/lib/ghost/content"
@@ -2075,8 +2656,8 @@
       placeholder: "ghost_db"
       required: true
 - templateVersion: 1.0.0
-  defaultVersion: "5.22"
-  documentation: https://ghost.org/resources/
+  defaultVersion: "5.25.3"
+  documentation: https://docs.ghost.org
   type: ghost-mysql
   name: Ghost
   subname: (MySQL)
@@ -2085,7 +2666,6 @@
   services:
     $$id:
      name: Ghost
-      documentation: "Taken from https://docs.ghost.org/"
      depends_on:
        - $$id-mysql
      image: "ghost:$$core_version"
@@ -2166,7 +2746,6 @@
   services:
     $$id:
       name: WordPress
-      documentation: " Taken from https://docs.docker.com/compose/wordpress/"
       depends_on:
         - $$id-mysql
       image: "wordpress:$$core_version"
@@ -2184,7 +2763,7 @@
       name: MySQL
       depends_on: []
       image: "bitnami/mysql:5.7"
-      imageArm: "mysql:5.7"
+      imageArm: "mysql:8.0"
       volumes:
         - "$$id-mysql-data:/bitnami/mysql/data"
       volumesArm:
@@ -2257,7 +2836,6 @@
   services:
     $$id:
       name: WordPress
-      documentation: "Taken from https://docs.docker.com/compose/wordpress/"
       image: "wordpress:$$core_version"
       volumes:
         - "$$id-wordpress-data:/var/www/html"
@@ -2319,7 +2897,7 @@
         define('WP_DEBUG_DISPLAY', false);
         @ini_set('display_errors', 0);
 - templateVersion: 1.0.0
-  defaultVersion: 4.7.1
+  defaultVersion: 4.9.0
   documentation: https://coder.com/docs/coder-oss/latest
   type: vscodeserver
   name: VSCode Server
@@ -2331,7 +2909,6 @@
   services:
     $$id:
       name: VSCode Server
-      documentation: "Taken from https://github.com/coder/code-server/. "
       depends_on: []
       image: "codercom/code-server:$$core_version"
       volumes:
@@ -2351,7 +2928,7 @@
       description: ""
       showOnConfiguration: true
 - templateVersion: 1.0.0
-  defaultVersion: RELEASE.2022-10-15T19-57-03Z
+  defaultVersion: RELEASE.2022-12-12T19-27-27Z
   documentation: https://min.io/docs/minio
   type: minio
   name: MinIO
@@ -2363,7 +2940,6 @@
     $$id:
       name: MinIO
       command: "server /data --console-address :9001"
-      documentation: "Taken from https://docs.min.io/docs/minio-docker-quickstart-guide.html"
       depends_on: []
      image: "minio/minio:$$core_version"
      volumes:
@@ -2423,7 +2999,6 @@
     $$id:
       name: Fider
       image: "getfider/fider:$$core_version"
-      documentation: "Taken from https://hub.docker.com/r/getfider/fider/"
       depends_on:
         - $$id-postgresql
       environment:
@@ -2443,7 +3018,6 @@
       - "3000"
     $$id-postgresql:
       name: PostgreSQL
-      documentation: "Taken from https://hub.docker.com/r/getfider/fider/"
       depends_on: []
       image: "postgres:12-alpine"
       volumes:
@@ -2532,7 +3106,7 @@
       defaultValue: $$generate_username
       description: ""
 - templateVersion: 1.0.0
-  defaultVersion: 0.198.1
+  defaultVersion: 0.207.0
   documentation: https://docs.n8n.io
   type: n8n
   name: n8n.io
@@ -2546,7 +3120,6 @@
   services:
     $$id:
       name: N8n
-      documentation: "Taken from https://hub.docker.com/r/n8nio/n8n"
       depends_on: []
       image: "n8nio/n8n:$$core_version"
       volumes:
@@ -2566,6 +3139,7 @@
 - templateVersion: 1.0.0
   defaultVersion: stable
   documentation: https://plausible.io/doc/
+  arch: amd64
   type: plausibleanalytics
   name: Plausible Analytics
   description: A lightweight and open-source website analytics tool.
@@ -2579,7 +3153,6 @@
   services:
     $$id:
       name: Plausible Analytics
-      documentation: "Taken from https://plausible.io/"
       command: >-
         sh -c "sleep 10 && /entrypoint.sh db createdb && /entrypoint.sh db
         migrate && /entrypoint.sh db init-admin && /entrypoint.sh run"
@@ -2601,8 +3174,7 @@
       - "8000"
     $$id-postgresql:
       name: PostgreSQL
-      documentation: "Taken from https://plausible.io/"
-      image: "bitnami/postgresql:13.2.0"
+      image: "bitnami/postgresql:13"
       volumes:
         - "$$id-postgresql-data:/bitnami/postgresql"
       environment:
@@ -2611,10 +3183,9 @@
       - POSTGRESQL_DATABASE=$$config_postgresql_database
```
- POSTGRESQL_DATABASE=$$config_postgresql_database
|
- POSTGRESQL_DATABASE=$$config_postgresql_database
|
||||||
$$id-clickhouse:
|
$$id-clickhouse:
|
||||||
name: Clickhouse
|
name: Clickhouse
|
||||||
documentation: "Taken from https://plausible.io/"
|
|
||||||
volumes:
|
volumes:
|
||||||
- "$$id-clickhouse-data:/var/lib/clickhouse"
|
- "$$id-clickhouse-data:/var/lib/clickhouse"
|
||||||
image: "yandex/clickhouse-server:21.3.2.5"
|
image: "clickhouse/clickhouse-server:22.6-alpine"
|
||||||
ulimits:
|
ulimits:
|
||||||
nofile:
|
nofile:
|
||||||
soft: 262144
|
soft: 262144
|
||||||
@@ -2712,7 +3283,7 @@
|
|||||||
defaultValue: plausible.js
|
defaultValue: plausible.js
|
||||||
description: This is the default script name.
|
description: This is the default script name.
|
||||||
- templateVersion: 1.0.0
|
- templateVersion: 1.0.0
|
||||||
defaultVersion: 0.98.1
|
defaultVersion: 0.99.1
|
||||||
documentation: https://docs.nocodb.com
|
documentation: https://docs.nocodb.com
|
||||||
type: nocodb
|
type: nocodb
|
||||||
name: NocoDB
|
name: NocoDB
|
||||||
|
|||||||
@@ -1,7 +1,11 @@
 {
-"watch": ["src"],
-"ignore": ["src/**/*.test.ts"],
+"watch": [
+"src"
+],
+"ignore": [
+"src/**/*.test.ts"
+],
 "ext": "ts,mjs,json,graphql",
-"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --minify=true --platform=node --outdir=build --format=cjs && node build",
+"exec": "rimraf build && esbuild `find src \\( -name '*.ts' \\)` --platform=node --outdir=build --format=cjs && node build",
 "legacyWatch": true
 }
@@ -9,62 +9,67 @@
 "db:studio": "prisma studio",
 "db:migrate": "COOLIFY_DATABASE_URL=file:../db/migration.db prisma migrate dev --skip-seed --name",
 "dev": "nodemon",
-"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --minify=true --platform=node --outdir=build --format=cjs",
+"build": "rimraf build && esbuild `find src \\( -name '*.ts' \\)| grep -v client/` --platform=node --outdir=build --format=cjs",
 "format": "prettier --write 'src/**/*.{js,ts,json,md}'",
 "lint": "prettier --check 'src/**/*.{js,ts,json,md}' && eslint --ignore-path .eslintignore .",
 "start": "NODE_ENV=production pnpm prisma migrate deploy && pnpm prisma generate && pnpm prisma db seed && node index.js"
 },
 "dependencies": {
 "@breejs/ts-worker": "2.0.0",
-"@fastify/autoload": "5.4.1",
+"@fastify/autoload": "5.5.0",
 "@fastify/cookie": "8.3.0",
-"@fastify/cors": "8.1.1",
+"@fastify/cors": "8.2.0",
 "@fastify/env": "4.1.0",
-"@fastify/jwt": "6.3.2",
+"@fastify/jwt": "6.3.3",
 "@fastify/multipart": "7.3.0",
-"@fastify/static": "6.5.0",
+"@fastify/static": "6.5.1",
 "@iarna/toml": "2.2.5",
 "@ladjs/graceful": "3.0.2",
-"@prisma/client": "4.5.0",
+"@prisma/client": "4.6.1",
+"@sentry/node": "7.21.1",
+"@sentry/tracing": "7.21.1",
+"axe": "11.0.0",
 "bcryptjs": "2.4.3",
 "bree": "9.1.2",
-"cabin": "9.1.2",
+"cabin": "11.0.1",
 "compare-versions": "5.0.1",
-"csv-parse": "5.3.1",
+"csv-parse": "5.3.2",
 "csvtojson": "2.0.10",
 "cuid": "2.1.8",
 "dayjs": "1.11.6",
 "dockerode": "3.3.4",
 "dotenv-extended": "2.9.0",
 "execa": "6.1.0",
-"fastify": "4.9.2",
+"fastify": "4.10.2",
 "fastify-plugin": "4.3.0",
 "fastify-socket.io": "4.0.0",
 "generate-password": "1.7.0",
-"got": "12.5.2",
+"got": "12.5.3",
 "is-ip": "5.0.0",
 "is-port-reachable": "4.0.0",
 "js-yaml": "4.1.0",
 "jsonwebtoken": "8.5.1",
+"minimist": "^1.2.7",
 "node-forge": "1.3.1",
 "node-os-utils": "1.3.7",
 "p-all": "4.0.0",
 "p-throttle": "5.0.0",
-"prisma": "4.5.0",
+"prisma": "4.6.1",
 "public-ip": "6.0.1",
 "pump": "3.0.0",
+"shell-quote": "^1.7.4",
 "socket.io": "4.5.3",
 "ssh-config": "4.1.6",
 "strip-ansi": "7.0.1",
 "unique-names-generator": "4.7.1"
 },
 "devDependencies": {
-"@types/node": "18.11.6",
+"@types/node": "18.11.9",
 "@types/node-os-utils": "1.3.0",
-"@typescript-eslint/eslint-plugin": "5.41.0",
+"@typescript-eslint/eslint-plugin": "5.44.0",
-"@typescript-eslint/parser": "5.41.0",
+"@typescript-eslint/parser": "5.44.0",
-"esbuild": "0.15.12",
+"esbuild": "0.15.15",
-"eslint": "8.26.0",
+"eslint": "8.28.0",
 "eslint-config-prettier": "8.5.0",
 "eslint-plugin-prettier": "4.2.1",
 "nodemon": "2.0.20",
@@ -72,7 +77,7 @@
 "rimraf": "3.0.2",
 "tsconfig-paths": "4.1.0",
 "types-fastify-socket.io": "0.0.1",
-"typescript": "4.8.4"
+"typescript": "4.9.3"
 },
 "prisma": {
 "seed": "node prisma/seed.js"
@@ -0,0 +1,45 @@
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Setting" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"fqdn" TEXT,
+"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
+"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
+"dualCerts" BOOLEAN NOT NULL DEFAULT false,
+"minPort" INTEGER NOT NULL DEFAULT 9000,
+"maxPort" INTEGER NOT NULL DEFAULT 9100,
+"proxyPassword" TEXT NOT NULL,
+"proxyUser" TEXT NOT NULL,
+"proxyHash" TEXT,
+"proxyDefaultRedirect" TEXT,
+"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
+"DNSServers" TEXT,
+"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+"ipv4" TEXT,
+"ipv6" TEXT,
+"arch" TEXT,
+"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
+"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false
+);
+INSERT INTO "new_Setting" ("DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt") SELECT "DNSServers", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "proxyHash", "proxyPassword", "proxyUser", "updatedAt" FROM "Setting";
+DROP TABLE "Setting";
+ALTER TABLE "new_Setting" RENAME TO "Setting";
+CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
+CREATE TABLE "new_ApplicationPersistentStorage" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"applicationId" TEXT NOT NULL,
+"path" TEXT NOT NULL,
+"oldPath" BOOLEAN NOT NULL DEFAULT false,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+CONSTRAINT "ApplicationPersistentStorage_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+INSERT INTO "new_ApplicationPersistentStorage" ("applicationId", "createdAt", "id", "path", "updatedAt") SELECT "applicationId", "createdAt", "id", "path", "updatedAt" FROM "ApplicationPersistentStorage";
+DROP TABLE "ApplicationPersistentStorage";
+ALTER TABLE "new_ApplicationPersistentStorage" RENAME TO "ApplicationPersistentStorage";
+CREATE UNIQUE INDEX "ApplicationPersistentStorage_applicationId_path_key" ON "ApplicationPersistentStorage"("applicationId", "path");
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,37 @@
+/*
+Warnings:
+
+- You are about to drop the column `proxyHash` on the `Setting` table. All the data in the column will be lost.
+- You are about to drop the column `proxyPassword` on the `Setting` table. All the data in the column will be lost.
+- You are about to drop the column `proxyUser` on the `Setting` table. All the data in the column will be lost.
+
+*/
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Setting" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"fqdn" TEXT,
+"dualCerts" BOOLEAN NOT NULL DEFAULT false,
+"minPort" INTEGER NOT NULL DEFAULT 9000,
+"maxPort" INTEGER NOT NULL DEFAULT 9100,
+"DNSServers" TEXT,
+"ipv4" TEXT,
+"ipv6" TEXT,
+"arch" TEXT,
+"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
+"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
+"proxyDefaultRedirect" TEXT,
+"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
+"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL
+);
+INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
+DROP TABLE "Setting";
+ALTER TABLE "new_Setting" RENAME TO "Setting";
+CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,59 @@
+-- CreateTable
+CREATE TABLE "DockerRegistry" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"name" TEXT NOT NULL,
+"url" TEXT NOT NULL,
+"username" TEXT,
+"password" TEXT,
+"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+"teamId" TEXT,
+CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Application" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"name" TEXT NOT NULL,
+"fqdn" TEXT,
+"repository" TEXT,
+"configHash" TEXT,
+"branch" TEXT,
+"buildPack" TEXT,
+"projectId" INTEGER,
+"port" INTEGER,
+"exposePort" INTEGER,
+"installCommand" TEXT,
+"buildCommand" TEXT,
+"startCommand" TEXT,
+"baseDirectory" TEXT,
+"publishDirectory" TEXT,
+"deploymentType" TEXT,
+"phpModules" TEXT,
+"pythonWSGI" TEXT,
+"pythonModule" TEXT,
+"pythonVariable" TEXT,
+"dockerFileLocation" TEXT,
+"denoMainFile" TEXT,
+"denoOptions" TEXT,
+"dockerComposeFile" TEXT,
+"dockerComposeFileLocation" TEXT,
+"dockerComposeConfiguration" TEXT,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+"destinationDockerId" TEXT,
+"gitSourceId" TEXT,
+"baseImage" TEXT,
+"baseBuildImage" TEXT,
+"dockerRegistryId" TEXT NOT NULL DEFAULT '0',
+CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
+CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
+CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "exposePort", "fqdn", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
+DROP TABLE "Application";
+ALTER TABLE "new_Application" RENAME TO "Application";
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,30 @@
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Setting" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"fqdn" TEXT,
+"dualCerts" BOOLEAN NOT NULL DEFAULT false,
+"minPort" INTEGER NOT NULL DEFAULT 9000,
+"maxPort" INTEGER NOT NULL DEFAULT 9100,
+"DNSServers" TEXT,
+"ipv4" TEXT,
+"ipv6" TEXT,
+"arch" TEXT,
+"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
+"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
+"proxyDefaultRedirect" TEXT,
+"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
+"isAPIDebuggingEnabled" BOOLEAN DEFAULT false,
+"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL
+);
+INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
+DROP TABLE "Setting";
+ALTER TABLE "new_Setting" RENAME TO "Setting";
+CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,60 @@
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Setting" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"fqdn" TEXT,
+"dualCerts" BOOLEAN NOT NULL DEFAULT false,
+"minPort" INTEGER NOT NULL DEFAULT 9000,
+"maxPort" INTEGER NOT NULL DEFAULT 9100,
+"DNSServers" TEXT,
+"ipv4" TEXT,
+"ipv6" TEXT,
+"arch" TEXT,
+"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
+"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
+"proxyDefaultRedirect" TEXT,
+"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
+"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL
+);
+INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", coalesce("isAPIDebuggingEnabled", false) AS "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "updatedAt" FROM "Setting";
+DROP TABLE "Setting";
+ALTER TABLE "new_Setting" RENAME TO "Setting";
+CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
+CREATE TABLE "new_GlitchTip" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"postgresqlUser" TEXT NOT NULL,
+"postgresqlPassword" TEXT NOT NULL,
+"postgresqlDatabase" TEXT NOT NULL,
+"postgresqlPublicPort" INTEGER,
+"secretKeyBase" TEXT,
+"defaultEmail" TEXT NOT NULL,
+"defaultUsername" TEXT NOT NULL,
+"defaultPassword" TEXT NOT NULL,
+"defaultEmailFrom" TEXT NOT NULL DEFAULT 'glitchtip@domain.tdl',
+"emailSmtpHost" TEXT DEFAULT 'domain.tdl',
+"emailSmtpPort" INTEGER DEFAULT 25,
+"emailSmtpUser" TEXT,
+"emailSmtpPassword" TEXT,
+"emailSmtpUseTls" BOOLEAN NOT NULL DEFAULT false,
+"emailSmtpUseSsl" BOOLEAN NOT NULL DEFAULT false,
+"emailBackend" TEXT,
+"mailgunApiKey" TEXT,
+"sendgridApiKey" TEXT,
+"enableOpenUserRegistration" BOOLEAN NOT NULL DEFAULT true,
+"serviceId" TEXT NOT NULL,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+CONSTRAINT "GlitchTip_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+INSERT INTO "new_GlitchTip" ("createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", "emailSmtpUseSsl", "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt") SELECT "createdAt", "defaultEmail", "defaultEmailFrom", "defaultPassword", "defaultUsername", "emailBackend", "emailSmtpHost", "emailSmtpPassword", "emailSmtpPort", coalesce("emailSmtpUseSsl", false) AS "emailSmtpUseSsl", coalesce("emailSmtpUseTls", false) AS "emailSmtpUseTls", "emailSmtpUser", "enableOpenUserRegistration", "id", "mailgunApiKey", "postgresqlDatabase", "postgresqlPassword", "postgresqlPublicPort", "postgresqlUser", "secretKeyBase", "sendgridApiKey", "serviceId", "updatedAt" FROM "GlitchTip";
+DROP TABLE "GlitchTip";
+ALTER TABLE "new_GlitchTip" RENAME TO "GlitchTip";
+CREATE UNIQUE INDEX "GlitchTip_serviceId_key" ON "GlitchTip"("serviceId");
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "Setting" ADD COLUMN "sentryDSN" TEXT;
@@ -0,0 +1,31 @@
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Setting" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"fqdn" TEXT,
+"dualCerts" BOOLEAN NOT NULL DEFAULT false,
+"minPort" INTEGER NOT NULL DEFAULT 9000,
+"maxPort" INTEGER NOT NULL DEFAULT 9100,
+"DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
+"ipv4" TEXT,
+"ipv6" TEXT,
+"arch" TEXT,
+"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
+"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
+"proxyDefaultRedirect" TEXT,
+"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
+"sentryDSN" TEXT,
+"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL
+);
+INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT coalesce("DNSServers", '1.1.1.1,8.8.8.8') AS "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
+DROP TABLE "Setting";
+ALTER TABLE "new_Setting" RENAME TO "Setting";
+CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,33 @@
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Setting" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"fqdn" TEXT,
+"dualCerts" BOOLEAN NOT NULL DEFAULT false,
+"minPort" INTEGER NOT NULL DEFAULT 9000,
+"maxPort" INTEGER NOT NULL DEFAULT 9100,
+"DNSServers" TEXT NOT NULL DEFAULT '1.1.1.1,8.8.8.8',
+"ipv4" TEXT,
+"ipv6" TEXT,
+"arch" TEXT,
+"concurrentBuilds" INTEGER NOT NULL DEFAULT 1,
+"applicationStoragePathMigrationFinished" BOOLEAN NOT NULL DEFAULT false,
+"numberOfDockerImagesKeptLocally" INTEGER NOT NULL DEFAULT 3,
+"proxyDefaultRedirect" TEXT,
+"doNotTrack" BOOLEAN NOT NULL DEFAULT false,
+"sentryDSN" TEXT,
+"previewSeparator" TEXT NOT NULL DEFAULT '.',
+"isAPIDebuggingEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isRegistrationEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isAutoUpdateEnabled" BOOLEAN NOT NULL DEFAULT false,
+"isDNSCheckEnabled" BOOLEAN NOT NULL DEFAULT true,
+"isTraefikUsed" BOOLEAN NOT NULL DEFAULT true,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL
+);
+INSERT INTO "new_Setting" ("DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", "numberOfDockerImagesKeptLocally", "proxyDefaultRedirect", "sentryDSN", "updatedAt") SELECT "DNSServers", "applicationStoragePathMigrationFinished", "arch", "concurrentBuilds", "createdAt", "doNotTrack", "dualCerts", "fqdn", "id", "ipv4", "ipv6", "isAPIDebuggingEnabled", "isAutoUpdateEnabled", "isDNSCheckEnabled", "isRegistrationEnabled", "isTraefikUsed", "maxPort", "minPort", 3, "proxyDefaultRedirect", "sentryDSN", "updatedAt" FROM "Setting";
+DROP TABLE "Setting";
+ALTER TABLE "new_Setting" RENAME TO "Setting";
+CREATE UNIQUE INDEX "Setting_fqdn_key" ON "Setting"("fqdn");
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "Application" ADD COLUMN "gitCommitHash" TEXT;
@@ -0,0 +1,66 @@
+/*
+Warnings:
+
+- You are about to drop the column `isSystemWide` on the `DockerRegistry` table. All the data in the column will be lost.
+
+*/
+-- RedefineTables
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_DockerRegistry" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"name" TEXT NOT NULL,
+"url" TEXT NOT NULL,
+"username" TEXT,
+"password" TEXT,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+"teamId" TEXT,
+CONSTRAINT "DockerRegistry_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_DockerRegistry" ("createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username") SELECT "createdAt", "id", "name", "password", "teamId", "updatedAt", "url", "username" FROM "DockerRegistry";
+DROP TABLE "DockerRegistry";
+ALTER TABLE "new_DockerRegistry" RENAME TO "DockerRegistry";
+CREATE TABLE "new_Application" (
+"id" TEXT NOT NULL PRIMARY KEY,
+"name" TEXT NOT NULL,
+"fqdn" TEXT,
+"repository" TEXT,
+"configHash" TEXT,
+"branch" TEXT,
+"buildPack" TEXT,
+"projectId" INTEGER,
+"port" INTEGER,
+"exposePort" INTEGER,
+"installCommand" TEXT,
+"buildCommand" TEXT,
+"startCommand" TEXT,
+"baseDirectory" TEXT,
+"publishDirectory" TEXT,
+"deploymentType" TEXT,
+"phpModules" TEXT,
+"pythonWSGI" TEXT,
+"pythonModule" TEXT,
+"pythonVariable" TEXT,
+"dockerFileLocation" TEXT,
+"denoMainFile" TEXT,
+"denoOptions" TEXT,
+"dockerComposeFile" TEXT,
+"dockerComposeFileLocation" TEXT,
+"dockerComposeConfiguration" TEXT,
+"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+"updatedAt" DATETIME NOT NULL,
+"destinationDockerId" TEXT,
+"gitSourceId" TEXT,
+"gitCommitHash" TEXT,
+"baseImage" TEXT,
+"baseBuildImage" TEXT,
+"dockerRegistryId" TEXT,
+CONSTRAINT "Application_gitSourceId_fkey" FOREIGN KEY ("gitSourceId") REFERENCES "GitSource" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
+CONSTRAINT "Application_destinationDockerId_fkey" FOREIGN KEY ("destinationDockerId") REFERENCES "DestinationDocker" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
+CONSTRAINT "Application_dockerRegistryId_fkey" FOREIGN KEY ("dockerRegistryId") REFERENCES "DockerRegistry" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_Application" ("baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt") SELECT "baseBuildImage", "baseDirectory", "baseImage", "branch", "buildCommand", "buildPack", "configHash", "createdAt", "denoMainFile", "denoOptions", "deploymentType", "destinationDockerId", "dockerComposeConfiguration", "dockerComposeFile", "dockerComposeFileLocation", "dockerFileLocation", "dockerRegistryId", "exposePort", "fqdn", "gitCommitHash", "gitSourceId", "id", "installCommand", "name", "phpModules", "port", "projectId", "publishDirectory", "pythonModule", "pythonVariable", "pythonWSGI", "repository", "startCommand", "updatedAt" FROM "Application";
+DROP TABLE "Application";
+ALTER TABLE "new_Application" RENAME TO "Application";
+PRAGMA foreign_key_check;
+PRAGMA foreign_keys=ON;
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "Application" ADD COLUMN "simpleDockerfile" TEXT;
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "Application" ADD COLUMN "dockerRegistryImageName" TEXT;
@@ -21,25 +21,27 @@ model Certificate {
 model Setting {
 id String @id @default(cuid())
 fqdn String? @unique
-isAPIDebuggingEnabled Boolean? @default(false)
-isRegistrationEnabled Boolean @default(false)
 dualCerts Boolean @default(false)
 minPort Int @default(9000)
 maxPort Int @default(9100)
-proxyPassword String
-proxyUser String
-proxyHash String?
-proxyDefaultRedirect String?
-isAutoUpdateEnabled Boolean @default(false)
-isDNSCheckEnabled Boolean @default(true)
-DNSServers String?
-isTraefikUsed Boolean @default(true)
-createdAt DateTime @default(now())
-updatedAt DateTime @updatedAt
+DNSServers String @default("1.1.1.1,8.8.8.8")
 ipv4 String?
 ipv6 String?
 arch String?
 concurrentBuilds Int @default(1)
+applicationStoragePathMigrationFinished Boolean @default(false)
+numberOfDockerImagesKeptLocally Int @default(3)
+proxyDefaultRedirect String?
+doNotTrack Boolean @default(false)
+sentryDSN String?
+previewSeparator String @default(".")
+isAPIDebuggingEnabled Boolean @default(false)
+isRegistrationEnabled Boolean @default(true)
+isAutoUpdateEnabled Boolean @default(false)
+isDNSCheckEnabled Boolean @default(true)
+isTraefikUsed Boolean @default(true)
+createdAt DateTime @default(now())
+updatedAt DateTime @updatedAt
 }
 
 model User {
@@ -82,6 +84,7 @@ model Team {
 service Service[]
 users User[]
 certificate Certificate[]
+dockerRegistry DockerRegistry[]
 }
 
 model TeamInvitation {
@@ -125,16 +128,22 @@ model Application {
 updatedAt DateTime @updatedAt
 destinationDockerId String?
 gitSourceId String?
+gitCommitHash String?
 baseImage String?
 baseBuildImage String?
-gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
-destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
-persistentStorage ApplicationPersistentStorage[]
 settings ApplicationSettings?
+dockerRegistryId String?
+dockerRegistryImageName String?
+simpleDockerfile String?
+
+persistentStorage ApplicationPersistentStorage[]
 secrets Secret[]
 teams Team[]
 connectedDatabase ApplicationConnectedDatabase?
 previewApplication PreviewApplication[]
+gitSource GitSource? @relation(fields: [gitSourceId], references: [id])
+destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
+dockerRegistry DockerRegistry? @relation(fields: [dockerRegistryId], references: [id])
 }
 
 model PreviewApplication {
@@ -186,6 +195,7 @@ model ApplicationPersistentStorage {
 id String @id @default(cuid())
 applicationId String
 path String
+oldPath Boolean @default(false)
 createdAt DateTime @default(now())
 updatedAt DateTime @updatedAt
 application Application @relation(fields: [applicationId], references: [id])
@@ -294,6 +304,19 @@ model SshKey {
 destinationDocker DestinationDocker[]
 }
 
+model DockerRegistry {
+id String @id @default(cuid())
+name String
+url String
+username String?
+password String?
+createdAt DateTime @default(now())
+updatedAt DateTime @updatedAt
+teamId String?
+team Team? @relation(fields: [teamId], references: [id])
+application Application[]
+}
+
 model GitSource {
 id String @id @default(cuid())
 name String
@@ -624,8 +647,8 @@ model GlitchTip {
 emailSmtpPort Int? @default(25)
 emailSmtpUser String?
 emailSmtpPassword String?
-emailSmtpUseTls Boolean? @default(false)
+emailSmtpUseTls Boolean @default(false)
-emailSmtpUseSsl Boolean? @default(false)
+emailSmtpUseSsl Boolean @default(false)
 emailBackend String?
 mailgunApiKey String?
 sendgridApiKey String?
@@ -1,18 +1,8 @@
 const dotEnvExtended = require('dotenv-extended');
 dotEnvExtended.load();
 const crypto = require('crypto');
-const generator = require('generate-password');
-const cuid = require('cuid');
 const { PrismaClient } = require('@prisma/client');
 const prisma = new PrismaClient();
-
-function generatePassword(length = 24) {
-return generator.generate({
-length,
-numbers: true,
-strict: true
-});
-}
 const algorithm = 'aes-256-ctr';
 
 async function main() {
@@ -21,11 +11,8 @@ async function main() {
 if (!settingsFound) {
 await prisma.setting.create({
 data: {
-isRegistrationEnabled: true,
-proxyPassword: encrypt(generatePassword()),
-proxyUser: cuid(),
+id: '0',
 arch: process.arch,
-DNSServers: '1.1.1.1,8.8.8.8'
 }
 });
 } else {
@@ -34,11 +21,11 @@ async function main() {
 id: settingsFound.id
 },
 data: {
-isTraefikUsed: true,
-proxyHash: null
+id: '0'
 }
 });
 }
+// Create local docker engine
 const localDocker = await prisma.destinationDocker.findFirst({
 where: { engine: '/var/run/docker.sock' }
 });
@@ -55,23 +42,18 @@ async function main() {
 
 // Set auto-update based on env variable
 const isAutoUpdateEnabled = process.env['COOLIFY_AUTO_UPDATE'] === 'true';
-const settings = await prisma.setting.findFirst({});
-if (settings) {
 await prisma.setting.update({
 where: {
-id: settings.id
+id: '0'
 },
 data: {
 isAutoUpdateEnabled
 }
 });
-}
+// Create public github source
 const github = await prisma.gitSource.findFirst({
 where: { htmlUrl: 'https://github.com', forPublic: true }
 });
-const gitlab = await prisma.gitSource.findFirst({
-where: { htmlUrl: 'https://gitlab.com', forPublic: true }
-});
 if (!github) {
 await prisma.gitSource.create({
 data: {
@@ -83,6 +65,10 @@ async function main() {
 }
 });
 }
+// Create public gitlab source
+const gitlab = await prisma.gitSource.findFirst({
+where: { htmlUrl: 'https://gitlab.com', forPublic: true }
+});
 if (!gitlab) {
 await prisma.gitSource.create({
 data: {
@@ -9,7 +9,7 @@ import autoLoad from '@fastify/autoload';
|
|||||||
import socketIO from 'fastify-socket.io'
|
import socketIO from 'fastify-socket.io'
|
||||||
import socketIOServer from './realtime'
|
import socketIOServer from './realtime'
|
||||||
|
|
||||||
import { asyncExecShell, cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, encrypt, executeDockerCmd, executeSSHCmd, generateDatabaseConfiguration, isDev, listSettings, prisma, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common';
|
import { cleanupDockerStorage, createRemoteEngineConfiguration, decrypt, executeCommand, generateDatabaseConfiguration, isDev, listSettings, prisma, sentryDSN, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common';
|
||||||
import { scheduler } from './lib/scheduler';
|
import { scheduler } from './lib/scheduler';
|
||||||
import { compareVersions } from 'compare-versions';
|
import { compareVersions } from 'compare-versions';
|
||||||
import Graceful from '@ladjs/graceful'
|
import Graceful from '@ladjs/graceful'
|
||||||
@@ -17,16 +17,15 @@ import yaml from 'js-yaml'
|
|||||||
import fs from 'fs/promises';
|
import fs from 'fs/promises';
|
||||||
import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers';
|
import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers';
|
||||||
import { checkContainer } from './lib/docker';
|
import { checkContainer } from './lib/docker';
|
||||||
import { migrateServicesToNewTemplate } from './lib';
|
import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib';
|
||||||
import { refreshTags, refreshTemplates } from './routes/api/v1/handlers';
|
import { refreshTags, refreshTemplates } from './routes/api/v1/handlers';
|
||||||
|
import * as Sentry from '@sentry/node';
|
||||||
declare module 'fastify' {
|
declare module 'fastify' {
|
||||||
interface FastifyInstance {
|
interface FastifyInstance {
|
||||||
config: {
|
config: {
|
||||||
COOLIFY_APP_ID: string,
|
COOLIFY_APP_ID: string,
|
||||||
COOLIFY_SECRET_KEY: string,
|
COOLIFY_SECRET_KEY: string,
|
||||||
COOLIFY_DATABASE_URL: string,
|
COOLIFY_DATABASE_URL: string,
|
||||||
COOLIFY_SENTRY_DSN: string,
|
|
||||||
COOLIFY_IS_ON: string,
|
COOLIFY_IS_ON: string,
|
||||||
COOLIFY_WHITE_LABELED: string,
|
COOLIFY_WHITE_LABELED: string,
|
||||||
COOLIFY_WHITE_LABELED_ICON: string | null,
|
COOLIFY_WHITE_LABELED_ICON: string | null,
|
||||||
@@ -37,6 +36,7 @@ declare module 'fastify' {
|
|||||||
|
|
||||||
const port = isDev ? 3001 : 3000;
|
const port = isDev ? 3001 : 3000;
|
||||||
const host = '0.0.0.0';
|
const host = '0.0.0.0';
|
||||||
|
|
||||||
(async () => {
|
(async () => {
|
||||||
const settings = await prisma.setting.findFirst()
|
const settings = await prisma.setting.findFirst()
|
||||||
const fastify = Fastify({
|
const fastify = Fastify({
|
||||||
@@ -58,10 +58,6 @@ const host = '0.0.0.0';
|
|||||||
type: 'string',
|
type: 'string',
|
||||||
default: 'file:../db/dev.db'
|
default: 'file:../db/dev.db'
|
||||||
},
|
},
|
||||||
COOLIFY_SENTRY_DSN: {
|
|
||||||
type: 'string',
|
|
||||||
default: null
|
|
||||||
},
|
|
||||||
COOLIFY_IS_ON: {
|
COOLIFY_IS_ON: {
|
||||||
type: 'string',
|
type: 'string',
|
||||||
default: 'docker'
|
default: 'docker'
|
||||||
@@ -114,7 +110,6 @@ const host = '0.0.0.0';
|
|||||||
origin: isDev ? "*" : ''
|
origin: isDev ? "*" : ''
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
// To detect allowed origins
|
// To detect allowed origins
|
||||||
// fastify.addHook('onRequest', async (request, reply) => {
|
// fastify.addHook('onRequest', async (request, reply) => {
|
||||||
// console.log(request.headers.host)
|
// console.log(request.headers.host)
|
||||||
@@ -142,7 +137,8 @@ const host = '0.0.0.0';
|
|||||||
await socketIOServer(fastify)
|
await socketIOServer(fastify)
|
||||||
console.log(`Coolify's API is listening on ${host}:${port}`);
|
console.log(`Coolify's API is listening on ${host}:${port}`);
|
||||||
|
|
||||||
migrateServicesToNewTemplate()
|
migrateServicesToNewTemplate();
|
||||||
|
await migrateApplicationPersistentStorage();
|
||||||
await initServer();
|
await initServer();
|
||||||
|
|
||||||
const graceful = new Graceful({ brees: [scheduler] });
|
const graceful = new Graceful({ brees: [scheduler] });
|
||||||
@@ -181,7 +177,7 @@ const host = '0.0.0.0';
|
|||||||
|
|
||||||
setInterval(async () => {
|
setInterval(async () => {
|
||||||
await migrateServicesToNewTemplate()
|
await migrateServicesToNewTemplate()
|
||||||
}, 60000)
|
}, isDev ? 10000 : 60000)
|
||||||
|
|
||||||
setInterval(async () => {
|
setInterval(async () => {
|
||||||
await copySSLCertificates();
|
await copySSLCertificates();
|
||||||
@@ -206,14 +202,14 @@ async function getIPAddress() {
|
|||||||
try {
|
try {
|
||||||
const settings = await listSettings();
|
const settings = await listSettings();
|
||||||
if (!settings.ipv4) {
|
if (!settings.ipv4) {
|
||||||
console.log(`Getting public IPv4 address...`);
|
|
||||||
const ipv4 = await publicIpv4({ timeout: 2000 })
|
const ipv4 = await publicIpv4({ timeout: 2000 })
|
||||||
|
console.log(`Getting public IPv4 address...`);
|
||||||
await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
|
await prisma.setting.update({ where: { id: settings.id }, data: { ipv4 } })
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!settings.ipv6) {
|
if (!settings.ipv6) {
|
||||||
console.log(`Getting public IPv6 address...`);
|
|
||||||
const ipv6 = await publicIpv6({ timeout: 2000 })
|
const ipv6 = await publicIpv6({ timeout: 2000 })
|
||||||
|
console.log(`Getting public IPv6 address...`);
|
||||||
await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
|
await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } })
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -227,13 +223,13 @@ async function getTagsTemplates() {
|
|||||||
const tags = await fs.readFile('./devTags.json', 'utf8')
|
const tags = await fs.readFile('./devTags.json', 'utf8')
|
||||||
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)))
|
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)))
|
||||||
await fs.writeFile('./tags.json', tags)
|
await fs.writeFile('./tags.json', tags)
|
||||||
console.log('Tags and templates loaded in dev mode...')
|
console.log('[004] Tags and templates loaded in dev mode...')
|
||||||
} else {
|
} else {
|
||||||
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text()
|
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text()
|
||||||
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text()
|
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text()
|
||||||
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)))
|
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)))
|
||||||
await fs.writeFile('/app/tags.json', tags)
|
await fs.writeFile('/app/tags.json', tags)
|
||||||
console.log('Tags and templates loaded...')
|
console.log('[004] Tags and templates loaded...')
|
||||||
}
|
}
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -242,16 +238,44 @@ async function getTagsTemplates() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
async function initServer() {
|
async function initServer() {
|
||||||
|
const appId = process.env['COOLIFY_APP_ID'];
|
||||||
|
const settings = await prisma.setting.findUnique({ where: { id: '0' } })
|
||||||
try {
|
try {
|
||||||
console.log(`Initializing server...`);
|
if (settings.doNotTrack === true) {
|
||||||
await asyncExecShell(`docker network create --attachable coolify`);
|
console.log('[000] Telemetry disabled...')
|
||||||
|
|
||||||
|
} else {
|
||||||
|
if (settings.sentryDSN !== sentryDSN) {
|
||||||
|
await prisma.setting.update({ where: { id: '0' }, data: { sentryDSN } })
|
||||||
|
}
|
||||||
|
// Initialize Sentry
|
||||||
|
// Sentry.init({
|
||||||
|
// dsn: sentryDSN,
|
||||||
|
// environment: isDev ? 'development' : 'production',
|
||||||
|
// release: version
|
||||||
|
// });
|
||||||
|
// console.log('[000] Sentry initialized...')
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
console.log(`[001] Initializing server...`);
|
||||||
|
await executeCommand({ command: `docker network create --attachable coolify` });
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
try {
|
try {
|
||||||
|
console.log(`[002] Cleanup stucked builds...`);
|
||||||
const isOlder = compareVersions('3.8.1', version);
|
const isOlder = compareVersions('3.8.1', version);
|
||||||
if (isOlder === 1) {
|
if (isOlder === 1) {
|
||||||
await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
|
await prisma.build.updateMany({ where: { status: { in: ['running', 'queued'] } }, data: { status: 'failed' } });
|
||||||
}
|
}
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
|
try {
|
||||||
|
console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
|
||||||
|
await fs.rm('/tmp/build-sources', { recursive: true, force: true })
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getArch() {
|
async function getArch() {
|
||||||
@@ -299,14 +323,10 @@ async function autoUpdater() {
|
|||||||
if (!isDev) {
|
if (!isDev) {
|
||||||
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
|
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
|
||||||
if (isAutoUpdateEnabled) {
|
if (isAutoUpdateEnabled) {
|
||||||
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
|
await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` })
|
||||||
await asyncExecShell(`env | grep '^COOLIFY' > .env`);
|
await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` })
|
||||||
await asyncExecShell(
|
await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` })
|
||||||
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
|
await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` })
|
||||||
);
|
|
||||||
await asyncExecShell(
|
|
||||||
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
console.log('Updating (not really in dev mode).');
|
console.log('Updating (not really in dev mode).');
|
||||||
@@ -327,8 +347,8 @@ async function checkFluentBit() {
|
|||||||
});
|
});
|
||||||
const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit', remove: true });
|
const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit', remove: true });
|
||||||
if (!found) {
|
if (!found) {
|
||||||
await asyncExecShell(`env | grep '^COOLIFY' > .env`);
|
await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
|
||||||
await asyncExecShell(`docker compose up -d fluent-bit`);
|
await executeCommand({ command: `docker compose up -d fluent-bit` });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -438,25 +458,25 @@ async function copySSLCertificates() {
 } catch (error) {
 console.log(error)
 } finally {
-await asyncExecShell(`find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete`)
+await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` })
 }
 }

 async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
 try {
-await asyncExecShell(`scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`)
-await executeSSHCmd({ dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
-await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
-await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
+await executeCommand({ command: `scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/` })
+await executeCommand({ sshCommand: true, shell: true, dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
+await executeCommand({ sshCommand: true, dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
+await executeCommand({ sshCommand: true, dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
 } catch (error) {
 console.log({ error })
 }
 }
 async function copyLocalCertificates(id: string) {
 try {
-await asyncExecShell(`docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`)
-await asyncExecShell(`docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`)
-await asyncExecShell(`docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`)
+await executeCommand({ command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`, shell: true })
+await executeCommand({ command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
+await executeCommand({ command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
 } catch (error) {
 console.log({ error })
 }
@@ -474,12 +494,13 @@ async function cleanupStorage() {
 try {
 let stdout = null
 if (!isDev) {
-const output = await executeDockerCmd({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'` })
+const output = await executeCommand({ dockerId: destination.id, command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`, shell: true })
 stdout = output.stdout;
 } else {
-const output = await asyncExecShell(
+const output = await executeCommand({
+command:
 `df -kPT /`
-);
+});
 stdout = output.stdout;
 }
 let lines = stdout.trim().split('\n');

@@ -3,8 +3,25 @@ import crypto from 'crypto';
 import fs from 'fs/promises';
 import yaml from 'js-yaml';

-import { copyBaseConfigurationFiles, makeLabelForStandaloneApplication, saveBuildLog, setDefaultConfiguration } from '../lib/buildPacks/common';
-import { createDirectories, decrypt, defaultComposeConfiguration, executeDockerCmd, getDomain, prisma, decryptApplication } from '../lib/common';
+import {
+copyBaseConfigurationFiles,
+makeLabelForSimpleDockerfile,
+makeLabelForStandaloneApplication,
+saveBuildLog,
+saveDockerRegistryCredentials,
+setDefaultConfiguration
+} from '../lib/buildPacks/common';
+import {
+createDirectories,
+decrypt,
+defaultComposeConfiguration,
+getDomain,
+prisma,
+decryptApplication,
+isDev,
+pushToRegistry,
+executeCommand
+} from '../lib/common';
 import * as importers from '../lib/importers';
 import * as buildpacks from '../lib/buildPacks';

@@ -14,44 +31,260 @@ import * as buildpacks from '../lib/buildPacks';
 if (message === 'error') throw new Error('oops');
 if (message === 'cancel') {
 parentPort.postMessage('cancelled');
-await prisma.$disconnect()
+await prisma.$disconnect();
 process.exit(0);
 }
 });
-const pThrottle = await import('p-throttle')
+const pThrottle = await import('p-throttle');
 const throttle = pThrottle.default({
 limit: 1,
 interval: 2000
 });

 const th = throttle(async () => {
 try {
-const queuedBuilds = await prisma.build.findMany({ where: { status: { in: ['queued', 'running'] } }, orderBy: { createdAt: 'asc' } });
-const { concurrentBuilds } = await prisma.setting.findFirst({})
+const queuedBuilds = await prisma.build.findMany({
+where: { status: { in: ['queued', 'running'] } },
+orderBy: { createdAt: 'asc' }
+});
+const { concurrentBuilds } = await prisma.setting.findFirst({});
 if (queuedBuilds.length > 0) {
 parentPort.postMessage({ deploying: true });
 const concurrency = concurrentBuilds;
 const pAll = await import('p-all');
-const actions = []
+const actions = [];

 for (const queueBuild of queuedBuilds) {
 actions.push(async () => {
-let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
-let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild, sourceRepository = null } = queueBuild
-application = decryptApplication(application)
-const originalApplicationId = application.id
-if (pullmergeRequestId) {
-const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
-if (previewApplications.length > 0) {
-previewApplicationId = previewApplications[0].id
+let application = await prisma.application.findUnique({
+where: { id: queueBuild.applicationId },
+include: {
+dockerRegistry: true,
+destinationDocker: true,
+gitSource: { include: { githubApp: true, gitlabApp: true } },
+persistentStorage: true,
+secrets: true,
+settings: true,
+teams: true
 }
-}
-const usableApplicationId = previewApplicationId || originalApplicationId
+});
+let {
+id: buildId,
+type,
+gitSourceId,
+sourceBranch = null,
+pullmergeRequestId = null,
+previewApplicationId = null,
+forceRebuild,
+sourceRepository = null
+} = queueBuild;
+application = decryptApplication(application);
+
+if (!gitSourceId && application.simpleDockerfile) {
+const {
+id: applicationId,
+destinationDocker,
+destinationDockerId,
+secrets,
+port,
+persistentStorage,
+exposePort,
+simpleDockerfile,
+dockerRegistry
+} = application;
+const { workdir } = await createDirectories({ repository: applicationId, buildId });
 try {
 if (queueBuild.status === 'running') {
-await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
+await saveBuildLog({
+line: 'Building halted, restarting...',
+buildId,
+applicationId: application.id
+});
 }
+const volumes =
+persistentStorage?.map((storage) => {
+if (storage.oldPath) {
+return `${applicationId}${storage.path
+.replace(/\//gi, '-')
+.replace('-app', '')}:${storage.path}`;
+}
+return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
+}) || [];
+
+if (destinationDockerId) {
+await prisma.build.update({
+where: { id: buildId },
+data: { status: 'running' }
+});
+try {
+const { stdout: containers } = await executeCommand({
+dockerId: destinationDockerId,
+command: `docker ps -a --filter 'label=com.docker.compose.service=${applicationId}' --format {{.ID}}`
+});
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({
+dockerId: destinationDockerId,
+command: `docker stop -t 0 ${container}`
+});
+await executeCommand({
+dockerId: destinationDockerId,
+command: `docker rm --force ${container}`
+});
+}
+}
+}
+} catch (error) {
+//
+}
+const envs = [`PORT='${port}'`];
+if (secrets.length > 0) {
+secrets.forEach((secret) => {
+if (pullmergeRequestId) {
+const isSecretFound = secrets.filter(
+(s) => s.name === secret.name && s.isPRMRSecret
+);
+if (isSecretFound.length > 0) {
+envs.push(`${secret.name}='${isSecretFound[0].value}'`);
+} else {
+envs.push(`${secret.name}='${secret.value}'`);
+}
+} else {
+if (!secret.isPRMRSecret) {
+envs.push(`${secret.name}='${secret.value}'`);
+}
+}
+});
+}
+await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
+let envFound = false;
+try {
+envFound = !!(await fs.stat(`${workdir}/.env`));
+} catch (error) {
+//
+}
+
+await fs.writeFile(`${workdir}/Dockerfile`, simpleDockerfile);
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry;
+await saveDockerRegistryCredentials({ url, username, password, workdir });
+}
+
+const labels = makeLabelForSimpleDockerfile({
+applicationId,
+type,
+port: exposePort ? `${exposePort}:${port}` : port
+});
+try {
+const composeVolumes = volumes.map((volume) => {
+return {
+[`${volume.split(':')[0]}`]: {
+name: volume.split(':')[0]
+}
+};
+});
+const composeFile = {
+version: '3.8',
+services: {
+[applicationId]: {
+build: {
+context: workdir
+},
+image: `${applicationId}:${buildId}`,
+container_name: applicationId,
+volumes,
+labels,
+env_file: envFound ? [`${workdir}/.env`] : [],
+depends_on: [],
+expose: [port],
+...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
+...defaultComposeConfiguration(destinationDocker.network)
+}
+},
+networks: {
+[destinationDocker.network]: {
+external: true
+}
+},
+volumes: Object.assign({}, ...composeVolumes)
+};
+await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
+await executeCommand({
+debug: true,
+dockerId: destinationDocker.id,
+command: `docker compose --project-directory ${workdir} up -d`
+});
+await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
+} catch (error) {
+await saveBuildLog({ line: error, buildId, applicationId });
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
+if (foundBuild) {
+await prisma.build.update({
+where: { id: buildId },
+data: {
+status: 'failed'
+}
+});
+}
+throw new Error(error);
+}
+}
+} catch (error) {
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
+if (foundBuild) {
+await prisma.build.update({
+where: { id: buildId },
+data: {
+status: 'failed'
+}
+});
+}
+if (error !== 1) {
+await saveBuildLog({ line: error, buildId, applicationId: application.id });
+}
+if (error instanceof Error) {
+await saveBuildLog({
+line: error.message,
+buildId,
+applicationId: application.id
+});
+}
+await fs.rm(workdir, { recursive: true, force: true });
+return;
+}
+try {
+if (application.dockerRegistryImageName) {
+const customTag = application.dockerRegistryImageName.split(':')[1] || buildId;
+const imageName = application.dockerRegistryImageName.split(':')[0];
+await saveBuildLog({
+line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
+buildId,
+applicationId: application.id
+});
+await pushToRegistry(application, workdir, buildId, imageName, customTag);
+await saveBuildLog({ line: 'Success', buildId, applicationId: application.id });
+}
+} catch (error) {
+if (error.stdout) {
+await saveBuildLog({ line: error.stdout, buildId, applicationId });
+}
+if (error.stderr) {
+await saveBuildLog({ line: error.stderr, buildId, applicationId });
+}
+} finally {
+await fs.rm(workdir, { recursive: true, force: true });
+await prisma.build.update({
+where: { id: buildId },
+data: { status: 'success' }
+});
+}
+return;
+}
+
+const originalApplicationId = application.id;
 const {
 id: applicationId,
 name,
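For orientation: the simpleDockerfile branch added above never touches a git source. It writes the stored Dockerfile and a generated .env into the work directory and hands both to docker compose. Assuming an application id of foo listening on port 3000 on the coolify network, the composeFile object would serialize to roughly the following (illustrative values only, including the /tmp/build-sources work directory; defaultComposeConfiguration merges in restart and logging defaults not shown here):

version: '3.8'
services:
  foo:
    build:
      context: /tmp/build-sources/foo/<buildId>
    image: foo:<buildId>
    container_name: foo
    env_file:
      - /tmp/build-sources/foo/<buildId>/.env
    expose:
      - 3000
networks:
  coolify:
    external: true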
@@ -73,7 +306,10 @@ import * as buildpacks from '../lib/buildPacks';
 baseImage,
 baseBuildImage,
 deploymentType,
-} = application
+gitCommitHash,
+dockerRegistry
+} = application;
+
 let {
 branch,
 repository,
@@ -85,9 +321,46 @@ import * as buildpacks from '../lib/buildPacks';
 baseDirectory,
 publishDirectory,
 dockerFileLocation,
+dockerComposeFileLocation,
 dockerComposeConfiguration,
 denoMainFile
-} = application
+} = application;
+
+let imageId = applicationId;
+let domain = getDomain(fqdn);
+
+let location = null;
+
+let tag = null;
+let customTag = null;
+let imageName = null;
+
+let imageFoundLocally = false;
+let imageFoundRemotely = false;
+
+if (pullmergeRequestId) {
+const previewApplications = await prisma.previewApplication.findMany({
+where: { applicationId: originalApplicationId, pullmergeRequestId }
+});
+if (previewApplications.length > 0) {
+previewApplicationId = previewApplications[0].id;
+}
+// Previews, we need to get the source branch and set subdomain
+branch = sourceBranch;
+domain = `${pullmergeRequestId}.${domain}`;
+imageId = `${applicationId}-${pullmergeRequestId}`;
+repository = sourceRepository || repository;
+}
+const { workdir, repodir } = await createDirectories({ repository, buildId });
+try {
+if (queueBuild.status === 'running') {
+await saveBuildLog({
+line: 'Building halted, restarting...',
+buildId,
+applicationId: application.id
+});
+}
+
 const currentHash = crypto
 .createHash('sha256')
 .update(
@@ -113,23 +386,25 @@ import * as buildpacks from '../lib/buildPacks';
 )
 .digest('hex');
 const { debug } = settings;
-let imageId = applicationId;
-let domain = getDomain(fqdn);
+if (!debug) {
+await saveBuildLog({
+line: `Debug logging is disabled. Enable it above if necessary!`,
+buildId,
+applicationId
+});
+}
 const volumes =
 persistentStorage?.map((storage) => {
-return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
-}${storage.path}`;
-}) || [];
-// Previews, we need to get the source branch and set subdomain
-if (pullmergeRequestId) {
-branch = sourceBranch;
-domain = `${pullmergeRequestId}.${domain}`;
-imageId = `${applicationId}-${pullmergeRequestId}`;
-repository = sourceRepository || repository;
+if (storage.oldPath) {
+return `${applicationId}${storage.path
+.replace(/\//gi, '-')
+.replace('-app', '')}:${storage.path}`;
 }
+return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
+}) || [];
+
 try {
-dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration)
+dockerComposeConfiguration = JSON.parse(dockerComposeConfiguration);
 } catch (error) {}
 let deployNeeded = true;
 let destinationType;
@@ -138,8 +413,11 @@ import * as buildpacks from '../lib/buildPacks';
 destinationType = 'docker';
 }
 if (destinationType === 'docker') {
-await prisma.build.update({ where: { id: buildId }, data: { status: 'running' } });
-const { workdir, repodir } = await createDirectories({ repository, buildId });
+await prisma.build.update({
+where: { id: buildId },
+data: { status: 'running' }
+});
+
 const configuration = await setDefaultConfiguration(application);

 buildPack = configuration.buildPack;
@@ -150,6 +428,7 @@ import * as buildpacks from '../lib/buildPacks';
 publishDirectory = configuration.publishDirectory;
 baseDirectory = configuration.baseDirectory || '';
 dockerFileLocation = configuration.dockerFileLocation;
+dockerComposeFileLocation = configuration.dockerComposeFileLocation;
 denoMainFile = configuration.denoMainFile;
 const commit = await importers[gitSource.type]({
 applicationId,
@@ -159,6 +438,8 @@ import * as buildpacks from '../lib/buildPacks';
 githubAppId: gitSource.githubApp?.id,
 gitlabAppId: gitSource.gitlabApp?.id,
 customPort: gitSource.customPort,
+gitCommitHash,
+configuration,
 repository,
 branch,
 buildId,
@@ -172,10 +453,21 @@ import * as buildpacks from '../lib/buildPacks';
 if (!commit) {
 throw new Error('No commit found?');
 }
-let tag = commit.slice(0, 7);
+tag = commit.slice(0, 7);
 if (pullmergeRequestId) {
 tag = `${commit.slice(0, 7)}-${pullmergeRequestId}`;
 }
+if (application.dockerRegistryImageName) {
+imageName = application.dockerRegistryImageName.split(':')[0];
+customTag = application.dockerRegistryImageName.split(':')[1] || tag;
+} else {
+customTag = tag;
+imageName = applicationId;
+}
+
+if (pullmergeRequestId) {
+customTag = `${customTag}-${pullmergeRequestId}`;
+}
+
 try {
 await prisma.build.update({ where: { id: buildId }, data: { commit } });
@@ -185,7 +477,11 @@ import * as buildpacks from '../lib/buildPacks';
 if (configHash !== currentHash) {
 deployNeeded = true;
 if (configHash) {
-await saveBuildLog({ line: 'Configuration changed.', buildId, applicationId });
+await saveBuildLog({
+line: 'Configuration changed',
+buildId,
+applicationId
+});
 }
 } else {
 deployNeeded = false;
@@ -194,17 +490,47 @@ import * as buildpacks from '../lib/buildPacks';
 deployNeeded = true;
 }

-let imageFound = false;
 try {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
 command: `docker image inspect ${applicationId}:${tag}`
-})
-imageFound = true;
+});
+imageFoundLocally = true;
 } catch (error) {
 //
 }
-await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry;
+location = await saveDockerRegistryCredentials({
+url,
+username,
+password,
+workdir
+});
+}
+
+try {
+await executeCommand({
+dockerId: destinationDocker.id,
+command: `docker ${
+location ? `--config ${location}` : ''
+} pull ${imageName}:${customTag}`
+});
+imageFoundRemotely = true;
+} catch (error) {
+//
+}
+let imageFound = `${applicationId}:${tag}`;
+if (imageFoundRemotely) {
+imageFound = `${imageName}:${customTag}`;
+}
+await copyBaseConfigurationFiles(
+buildPack,
+workdir,
+buildId,
+applicationId,
+baseImage
+);
 const labels = makeLabelForStandaloneApplication({
 applicationId,
 fqdn,
@@ -223,8 +549,8 @@ import * as buildpacks from '../lib/buildPacks';
 baseDirectory,
 publishDirectory
 });
-if (forceRebuild) deployNeeded = true
-if (!imageFound || deployNeeded) {
+if (forceRebuild) deployNeeded = true;
+if ((!imageFoundLocally && !imageFoundRemotely) || deployNeeded) {
 if (buildpacks[buildPack])
 await buildpacks[buildPack]({
 dockerId: destinationDocker.id,
@@ -258,45 +584,84 @@ import * as buildpacks from '../lib/buildPacks';
 pythonVariable,
 dockerFileLocation,
 dockerComposeConfiguration,
+dockerComposeFileLocation,
 denoMainFile,
 denoOptions,
 baseImage,
 baseBuildImage,
 deploymentType,
+forceRebuild
 });
 else {
-await saveBuildLog({ line: `Build pack ${buildPack} not found`, buildId, applicationId });
+await saveBuildLog({
+line: `Build pack ${buildPack} not found`,
+buildId,
+applicationId
+});
 throw new Error(`Build pack ${buildPack} not found.`);
 }
 } else {
-await saveBuildLog({ line: 'Build image already available - no rebuild required.', buildId, applicationId });
+if (imageFoundRemotely || deployNeeded) {
+await saveBuildLog({
+line: `Container image ${imageFound} found in Docker Registry - reusing it`,
+buildId,
+applicationId
+});
+} else {
+if (imageFoundLocally || deployNeeded) {
+await saveBuildLog({
+line: `Container image ${imageFound} found locally - reusing it`,
+buildId,
+applicationId
+});
+}
+}
 }
+
 if (buildPack === 'compose') {
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
+command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}`
+});
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=coolify.applicationId=${applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
-})
+command: `docker stop -t 0 ${container}`
+});
+await executeCommand({
+dockerId: destinationDockerId,
+command: `docker rm --force ${container}`
+});
+}
+}
+}
 } catch (error) {
 //
 }
 try {
-await executeDockerCmd({ debug, buildId, applicationId, dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
-await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
-await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
-await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
+await executeCommand({
+debug,
+buildId,
+applicationId,
+dockerId: destinationDocker.id,
+command: `docker compose --project-directory ${workdir} up -d`
+});
+await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
+await prisma.build.update({
+where: { id: buildId },
+data: { status: 'success' }
+});
 await prisma.application.update({
 where: { id: applicationId },
 data: { configHash: currentHash }
 });
 } catch (error) {
 await saveBuildLog({ line: error, buildId, applicationId });
-const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
 if (foundBuild) {
 await prisma.build.update({
 where: { id: buildId },
@@ -307,40 +672,56 @@ import * as buildpacks from '../lib/buildPacks';
 }
 throw new Error(error);
 }

 } else {
 try {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
+command: `docker ps -a --filter 'label=com.docker.compose.service=${
+pullmergeRequestId ? imageId : applicationId
+}' --format {{.ID}}`
+});
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({
 dockerId: destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.service=${pullmergeRequestId ? imageId : applicationId}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
-})
+command: `docker stop -t 0 ${container}`
+});
+await executeCommand({
+dockerId: destinationDockerId,
+command: `docker rm --force ${container}`
+});
+}
+}
+}
 } catch (error) {
 //
 }
-const envs = [
-`PORT=${port}`
-];
+const envs = [`PORT='${port}'`];
 if (secrets.length > 0) {
 secrets.forEach((secret) => {
 if (pullmergeRequestId) {
-const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+const isSecretFound = secrets.filter(
+(s) => s.name === secret.name && s.isPRMRSecret
+);
 if (isSecretFound.length > 0) {
-envs.push(`${secret.name}=${isSecretFound[0].value}`);
+envs.push(`${secret.name}='${isSecretFound[0].value}'`);
 } else {
-envs.push(`${secret.name}=${secret.value}`);
+envs.push(`${secret.name}='${secret.value}'`);
 }
 } else {
 if (!secret.isPRMRSecret) {
-envs.push(`${secret.name}=${secret.value}`);
+envs.push(`${secret.name}='${secret.value}'`);
 }
 }
 });
 }
 await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
+if (dockerRegistry) {
+const { url, username, password } = dockerRegistry;
+await saveDockerRegistryCredentials({ url, username, password, workdir });
+}
+
 let envFound = false;
 try {
@@ -349,7 +730,6 @@ import * as buildpacks from '../lib/buildPacks';
 //
 }
 try {
-await saveBuildLog({ line: 'Deployment started.', buildId, applicationId });
 const composeVolumes = volumes.map((volume) => {
 return {
 [`${volume.split(':')[0]}`]: {
@@ -361,7 +741,7 @@ import * as buildpacks from '../lib/buildPacks';
 version: '3.8',
 services: {
 [imageId]: {
-image: `${applicationId}:${tag}`,
+image: imageFound,
 container_name: imageId,
 volumes,
 env_file: envFound ? [`${workdir}/.env`] : [],
@@ -369,7 +749,7 @@ import * as buildpacks from '../lib/buildPacks';
 depends_on: [],
 expose: [port],
 ...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
-...defaultComposeConfiguration(destinationDocker.network),
+...defaultComposeConfiguration(destinationDocker.network)
 }
 },
 networks: {
@@ -380,11 +760,15 @@ import * as buildpacks from '../lib/buildPacks';
 volumes: Object.assign({}, ...composeVolumes)
 };
 await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
-await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose --project-directory ${workdir} up -d` })
-await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
+await executeCommand({
+debug,
+dockerId: destinationDocker.id,
+command: `docker compose --project-directory ${workdir} up -d`
+});
+await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
 } catch (error) {
 await saveBuildLog({ line: error, buildId, applicationId });
-const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
 if (foundBuild) {
 await prisma.build.update({
 where: { id: buildId },
@@ -395,17 +779,16 @@ import * as buildpacks from '../lib/buildPacks';
 }
 throw new Error(error);
 }
-await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
-await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
-if (!pullmergeRequestId) await prisma.application.update({
+if (!pullmergeRequestId)
+await prisma.application.update({
 where: { id: applicationId },
 data: { configHash: currentHash }
 });
 }
 }
-}
-catch (error) {
-const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
+} catch (error) {
+const foundBuild = await prisma.build.findUnique({ where: { id: buildId } });
 if (foundBuild) {
 await prisma.build.update({
 where: { id: buildId },
@@ -417,17 +800,47 @@ import * as buildpacks from '../lib/buildPacks';
 if (error !== 1) {
 await saveBuildLog({ line: error, buildId, applicationId: application.id });
 }
+if (error instanceof Error) {
+await saveBuildLog({
+line: error.message,
+buildId,
+applicationId: application.id
+});
+}
+await fs.rm(workdir, { recursive: true, force: true });
+return;
+}
+try {
+if (application.dockerRegistryImageName && (!imageFoundRemotely || forceRebuild)) {
+await saveBuildLog({
+line: `Pushing ${imageName}:${customTag} to Docker Registry... It could take a while...`,
+buildId,
+applicationId: application.id
+});
+await pushToRegistry(application, workdir, tag, imageName, customTag);
+await saveBuildLog({ line: 'Success', buildId, applicationId: application.id });
+}
+} catch (error) {
+if (error.stdout) {
+await saveBuildLog({ line: error.stdout, buildId, applicationId });
+}
+if (error.stderr) {
+await saveBuildLog({ line: error.stderr, buildId, applicationId });
+}
+} finally {
+await fs.rm(workdir, { recursive: true, force: true });
+await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
 }
 });
 }
-await pAll.default(actions, { concurrency })
+await pAll.default(actions, { concurrency });
 }
 } catch (error) {
-console.log(error)
+console.log(error);
 }
-})
+});
 while (true) {
-await th()
+await th();
 }
 } else process.exit(0);
 })();

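A note on the scheduler shape used above: `while (true) { await th(); }` is not a busy loop, because p-throttle with limit: 1 and interval: 2000 lets at most one queue scan start per two-second window. The same pattern in isolation (this uses p-throttle's documented API; the logging body is a placeholder):

import pThrottle from 'p-throttle';

const throttle = pThrottle({ limit: 1, interval: 2000 });
const tick = throttle(async () => {
	// at most one invocation begins per 2000 ms window
	console.log('scanning build queue at', new Date().toISOString());
});

(async () => {
	while (true) {
		await tick(); // resolves only once the throttler grants a slot
	}
})();
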
@@ -1,7 +1,33 @@
 import cuid from "cuid";
-import { decrypt, encrypt, fixType, generatePassword, prisma } from "./lib/common";
+import { decrypt, encrypt, fixType, generatePassword, generateToken, prisma } from "./lib/common";
 import { getTemplates } from "./lib/services";

+export async function migrateApplicationPersistentStorage() {
+const settings = await prisma.setting.findFirst()
+if (settings) {
+const { id: settingsId, applicationStoragePathMigrationFinished } = settings
+try {
+if (!applicationStoragePathMigrationFinished) {
+const applications = await prisma.application.findMany({ include: { persistentStorage: true } });
+for (const application of applications) {
+if (application.persistentStorage && application.persistentStorage.length > 0 && application?.buildPack !== 'docker') {
+for (const storage of application.persistentStorage) {
+let { id, path } = storage
+if (!path.startsWith('/app')) {
+path = `/app${path}`
+await prisma.applicationPersistentStorage.update({ where: { id }, data: { path, oldPath: true } })
+}
+}
+}
+}
+}
+} catch (error) {
+console.log(error)
+} finally {
+await prisma.setting.update({ where: { id: settingsId }, data: { applicationStoragePathMigrationFinished: true } })
+}
+}
+}
 export async function migrateServicesToNewTemplate() {
 // This function migrates old hardcoded services to the new template based services
 try {
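The migrateApplicationPersistentStorage function added above rewrites existing volume paths for non-docker build packs so they mount under /app, and marks each rewritten row with oldPath: true; the deploy worker's volumes mapping (see the worker diff earlier) uses that flag to strip the '-app' segment from generated volume names. The transform in isolation, as a sketch:

// '/data' becomes '/app/data'; paths already under /app are left untouched.
function migrateStoragePath(path: string): { path: string; oldPath: true } | null {
	if (path.startsWith('/app')) return null; // nothing to migrate
	return { path: `/app${path}`, oldPath: true };
}
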
@@ -57,7 +83,7 @@ export async function migrateServicesToNewTemplate() {
 } catch (error) {
 console.log(error)
 }
+if (template.variables) {
 if (template.variables.length > 0) {
 for (const variable of template.variables) {
 const { defaultValue } = variable;
@@ -69,6 +95,8 @@ export async function migrateServicesToNewTemplate() {
 variable.value = generatePassword({ length, isHex: true });
 } else if (variable.defaultValue.startsWith('$$generate_username')) {
 variable.value = cuid();
+} else if (variable.defaultValue.startsWith('$$generate_token')) {
+variable.value = generateToken()
 } else {
 variable.value = variable.defaultValue || '';
 }
@@ -93,6 +121,7 @@ export async function migrateServicesToNewTemplate() {
 }
 }
 }
+}
 for (const s of Object.keys(template.services)) {
 if (service.type === 'plausibleanalytics') {
 continue;
@@ -221,7 +250,7 @@ async function hasura(service: any, template: any) {
 const { id } = service

 const secrets = [
-`HASURA_GRAPHQL_ADMIN_PASSWORD@@@${graphQLAdminPassword}`,
+`HASURA_GRAPHQL_ADMIN_SECRET@@@${graphQLAdminPassword}`,
 `HASURA_GRAPHQL_METADATA_DATABASE_URL@@@${encrypt(`postgres://${postgresqlUser}:${decrypt(postgresqlPassword)}@${id}-postgresql:5432/${postgresqlDatabase}`)}`,
 `POSTGRES_PASSWORD@@@${postgresqlPassword}`,
 ]
@@ -238,7 +267,6 @@ async function hasura(service: any, template: any) {
 async function umami(service: any, template: any) {
 const { postgresqlUser, postgresqlPassword, postgresqlDatabase, umamiAdminPassword, hashSalt } = service.umami
 const { id } = service
-
 const secrets = [
 `HASH_SALT@@@${hashSalt}`,
 `POSTGRES_PASSWORD@@@${postgresqlPassword}`,
@@ -439,7 +467,6 @@ async function plausibleAnalytics(service: any, template: any) {
 // Disconnect old service data
 // await prisma.service.update({ where: { id: service.id }, data: { plausibleAnalytics: { disconnect: true } } })
 }
-
 async function migrateSettings(settings: any[], service: any, template: any) {
 for (const setting of settings) {
 try {

@@ -1,6 +1,17 @@
-import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
+import {
+base64Encode,
+decrypt,
+encrypt,
+executeCommand,
+generateTimestamp,
+getDomain,
+isARM,
+isDev,
+prisma,
+version
+} from '../common';
 import { promises as fs } from 'fs';
-import { day } from "../dayjs";
+import { day } from '../dayjs';

 const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
 const nodeBased = [
@@ -17,7 +28,10 @@ const nodeBased = [
 'nextjs'
 ];

-export function setDefaultBaseImage(buildPack: string | null, deploymentType: string | null = null) {
+export function setDefaultBaseImage(
+buildPack: string | null,
+deploymentType: string | null = null
+) {
 const nodeVersions = [
 {
 value: 'node:lts',
@@ -52,6 +66,14 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 {
 value: 'webdevops/apache:alpine',
 label: 'webdevops/apache:alpine'
+},
+{
+value: 'nginx:alpine',
+label: 'nginx:alpine'
+},
+{
+value: 'httpd:alpine',
+label: 'httpd:alpine (Apache)'
 }
 ];
 const rustVersions = [
@@ -214,8 +236,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 label: 'webdevops/php-apache:7.1-alpine'
 },
 {
-value: 'webdevops/php-nginx:7.1-alpine',
-label: 'webdevops/php-nginx:7.1-alpine'
+value: 'php:8.1-fpm',
+label: 'php:8.1-fpm'
+},
+{
+value: 'php:8.0-fpm',
+label: 'php:8.0-fpm'
+},
+{
+value: 'php:8.1-fpm-alpine',
+label: 'php:8.1-fpm-alpine'
+},
+{
+value: 'php:8.0-fpm-alpine',
+label: 'php:8.0-fpm-alpine'
 }
 ];
 const pythonVersions = [
@@ -296,8 +330,8 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 {
 value: 'heroku/builder-classic:22',
 label: 'heroku/builder-classic:22'
-},
-]
+}
+];
 let payload: any = {
 baseImage: null,
 baseBuildImage: null,
@@ -306,8 +340,10 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 };
 if (nodeBased.includes(buildPack)) {
 if (deploymentType === 'static') {
-payload.baseImage = 'webdevops/nginx:alpine';
-payload.baseImages = staticVersions;
+payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
+payload.baseImages = isARM(process.arch)
+? staticVersions.filter((version) => !version.value.includes('webdevops'))
+: staticVersions;
 payload.baseBuildImage = 'node:lts';
 payload.baseBuildImages = nodeVersions;
 } else {
@@ -318,8 +354,10 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 }
 }
 if (staticApps.includes(buildPack)) {
-payload.baseImage = 'webdevops/nginx:alpine';
-payload.baseImages = staticVersions;
+payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
+payload.baseImages = isARM(process.arch)
+? staticVersions.filter((version) => !version.value.includes('webdevops'))
+: staticVersions;
 payload.baseBuildImage = 'node:lts';
 payload.baseBuildImages = nodeVersions;
 }
@@ -337,12 +375,20 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
 payload.baseImage = 'denoland/deno:latest';
 }
 if (buildPack === 'php') {
-payload.baseImage = 'webdevops/php-apache:8.2-alpine';
-payload.baseImages = phpVersions;
+payload.baseImage = isARM(process.arch)
+? 'php:8.1-fpm-alpine'
+: 'webdevops/php-apache:8.2-alpine';
+payload.baseImages = isARM(process.arch)
+? phpVersions.filter((version) => !version.value.includes('webdevops'))
+: phpVersions;
 }
 if (buildPack === 'laravel') {
-payload.baseImage = 'webdevops/php-apache:8.2-alpine';
-payload.baseImages = phpVersions;
+payload.baseImage = isARM(process.arch)
+? 'php:8.1-fpm-alpine'
+: 'webdevops/php-apache:8.2-alpine';
+payload.baseImages = isARM(process.arch)
+? phpVersions.filter((version) => !version.value.includes('webdevops'))
+: phpVersions;
 payload.baseBuildImage = 'node:18';
 payload.baseBuildImages = nodeVersions;
 }
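Every branch above now consults isARM(process.arch) to steer ARM hosts away from the webdevops/* images, which are published for amd64 only. isARM is imported from ../common and its body is not part of this diff; a plausible one-line helper is sketched below purely as an assumption:

// Hypothetical: the real helper in lib/common.ts may accept more architecture strings.
export function isARM(arch: string): boolean {
	return arch === 'arm' || arch === 'arm64' || arch === 'aarch64';
}
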
@@ -363,6 +409,7 @@ export const setDefaultConfiguration = async (data: any) => {
 publishDirectory,
 baseDirectory,
 dockerFileLocation,
+dockerComposeFileLocation,
 denoMainFile
 } = data;
 //@ts-ignore
@@ -384,7 +431,8 @@ export const setDefaultConfiguration = async (data: any) => {
 if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
 if (baseDirectory) {
 if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
-if (baseDirectory.endsWith('/') && baseDirectory !== '/') baseDirectory = baseDirectory.slice(0, -1);
+if (baseDirectory.endsWith('/') && baseDirectory !== '/')
+baseDirectory = baseDirectory.slice(0, -1);
 }
 if (dockerFileLocation) {
 if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
@@ -392,6 +440,14 @@ export const setDefaultConfiguration = async (data: any) => {
 } else {
 dockerFileLocation = '/Dockerfile';
 }
+if (dockerComposeFileLocation) {
+if (!dockerComposeFileLocation.startsWith('/'))
+dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
+if (dockerComposeFileLocation.endsWith('/'))
+dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
+} else {
+dockerComposeFileLocation = '/Dockerfile';
+}
 if (!denoMainFile) {
 denoMainFile = 'main.ts';
 }
@@ -405,6 +461,7 @@ export const setDefaultConfiguration = async (data: any) => {
 publishDirectory,
 baseDirectory,
 dockerFileLocation,
+dockerComposeFileLocation,
 denoMainFile
 };
 };
@@ -451,7 +508,6 @@ export const scanningTemplates = {
 }
 };

-
 export const saveBuildLog = async ({
 line,
 buildId,
@@ -461,14 +517,26 @@ export const saveBuildLog = async ({
 buildId: string;
 applicationId: string;
 }): Promise<any> => {
-const { default: got } = await import('got')
+if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
+if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
+const { default: got } = await import('got');
+if (typeof line === 'object' && line) {
+if (line.shortMessage) {
+line = line.shortMessage + '\n' + line.stderr;
+} else {
+line = JSON.stringify(line);
+}
+}
 if (line && typeof line === 'string' && line.includes('ghs_')) {
 const regex = /ghs_.*@/g;
 line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
 }
 const addTimestamp = `[${generateTimestamp()}] ${line}`;
-const fluentBitUrl = isDev ? process.env.COOLIFY_CONTAINER_DEV === 'true' ? 'http://coolify-fluentbit:24224' : 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
+const fluentBitUrl = isDev
+? process.env.COOLIFY_CONTAINER_DEV === 'true'
+? 'http://coolify-fluentbit:24224'
+: 'http://localhost:24224'
+: 'http://coolify-fluentbit:24224';
+
 if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
 console.debug(`[${applicationId}] ${addTimestamp}`);
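saveBuildLog can now be handed the error objects thrown by the command runner directly: an object with a shortMessage is flattened to shortMessage plus stderr, and anything else is JSON-stringified. For example, an execa-style failure (field names follow execa's documented error shape; the values are invented):

const failure = {
	shortMessage: 'Command failed with exit code 1: docker build ...',
	stderr: 'ERROR: failed to solve: dockerfile parse error'
};
// Passed as `line`, this is logged as:
// Command failed with exit code 1: docker build ...
// ERROR: failed to solve: dockerfile parse error
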
@@ -478,15 +546,17 @@ export const saveBuildLog = async ({
 json: {
 line: encrypt(line)
 }
-})
+});
 } catch (error) {
 return await prisma.buildLog.create({
 data: {
-line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
+line: addTimestamp,
+buildId,
+time: Number(day().valueOf()),
+applicationId
 }
 });
 }

 };

 export async function copyBaseConfigurationFiles(
@@ -558,6 +628,7 @@ export async function copyBaseConfigurationFiles(
 `
 );
 }
+// TODO: Add more configuration files for other buildpacks, like apache2, etc.
 } catch (error) {
 throw new Error(error);
 }
@@ -571,6 +642,29 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
 	);
 }

+export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
+	if (!username || !password) {
+		return null;
+	}
+
+	let decryptedPassword = decrypt(password);
+	const location = `${workdir}/.docker`;
+
+	try {
+		await fs.mkdir(`${workdir}/.docker`);
+	} catch (error) {
+		console.log(error);
+	}
+	const payload = JSON.stringify({
+		auths: {
+			[url]: {
+				auth: Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
+			}
+		}
+	});
+	await fs.writeFile(`${location}/config.json`, payload);
+	return location;
+}
 export async function buildImage({
 	applicationId,
 	tag,
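
To make the new helper concrete: it writes the same `auths` layout that `docker login` produces, into a per-build directory that is later handed to the CLI via `--config`. A sketch of the written file, with a placeholder registry and credentials (not taken from the diff):

	// Roughly what `${workdir}/.docker/config.json` ends up containing.
	const example = {
		auths: {
			'registry.example.com': {
				// base64 of 'user:password' — the encoding Docker expects
				auth: Buffer.from('user:password').toString('base64')
			}
		}
	};
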
@@ -580,36 +674,51 @@ export async function buildImage({
 	isCache = false,
 	debug = false,
 	dockerFileLocation = '/Dockerfile',
-	commit
+	commit,
+	forceRebuild = false
 }) {
 	if (isCache) {
-		await saveBuildLog({ line: `Building cache image started.`, buildId, applicationId });
+		await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
 	} else {
-		await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
+		await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
 	}
-	if (!debug) {
-		await saveBuildLog({
-			line: `Debug turned off. To see more details, allow it in the features tab.`,
-			buildId,
-			applicationId
-		});
-	}
-	const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`
-	const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`
-
-	await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker build --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}` })
-	const { status } = await prisma.build.findUnique({ where: { id: buildId } })
+	const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`;
+	const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`;
+	let location = null;
+
+	const { dockerRegistry } = await prisma.application.findUnique({
+		where: { id: applicationId },
+		select: { dockerRegistry: true }
+	});
+	if (dockerRegistry) {
+		const { url, username, password } = dockerRegistry;
+		location = await saveDockerRegistryCredentials({ url, username, password, workdir });
+	}
+
+	await executeCommand({
+		stream: true,
+		debug,
+		buildId,
+		applicationId,
+		dockerId,
+		command: `docker ${location ? `--config ${location}` : ''} build ${
+			forceRebuild ? '--no-cache' : ''
+		} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
+	});
+
+	const { status } = await prisma.build.findUnique({ where: { id: buildId } });
 	if (status === 'canceled') {
-		throw new Error('Deployment canceled.')
+		throw new Error('Canceled.');
 	}
-	if (isCache) {
-		await saveBuildLog({ line: `Building cache image successful.`, buildId, applicationId });
-	} else {
-		await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
-	}
 }
+export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
+	return [
+		'coolify.managed=true',
+		`coolify.version=${version}`,
+		`coolify.applicationId=${applicationId}`,
+		`coolify.type=standalone-application`
+	];
+}
 export function makeLabelForStandaloneApplication({
 	applicationId,
 	fqdn,
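
The easiest way to read the rewritten buildImage is through the command string it assembles. A sketch with illustrative values (the names below are made up; only the template literal comes from the diff):

	const location = '/tmp/build-sources/repo/42/.docker'; // null when no registry is configured
	const forceRebuild = true;
	const command = `docker ${location ? `--config ${location}` : ''} build ${
		forceRebuild ? '--no-cache' : ''
	} --progress plain -f /tmp/wd/Dockerfile -t app:42 --build-arg SOURCE_COMMIT=abc123 /tmp/wd`;
	// => docker --config /tmp/build-sources/repo/42/.docker build --no-cache --progress plain ...
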
@@ -638,6 +747,7 @@ export function makeLabelForStandaloneApplication({
 		`coolify.version=${version}`,
 		`coolify.applicationId=${applicationId}`,
 		`coolify.type=standalone-application`,
+		`coolify.name=${name}`,
 		`coolify.configuration=${base64Encode(
 			JSON.stringify({
 				applicationId,
@@ -680,15 +790,15 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
 		secrets.forEach((secret) => {
 			if (secret.isBuildSecret) {
 				if (pullmergeRequestId) {
-					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+					const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
 					if (isSecretFound.length > 0) {
-						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+						Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 					} else {
-						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+						Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 					}
 				} else {
 					if (!secret.isPRMRSecret) {
-						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+						Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 					}
 				}
 			}
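
The single quotes added around every ARG value (the same change repeats in each buildpack below) guard against secret values containing spaces or shell metacharacters; unquoted, everything after the first space would be dropped from the generated Dockerfile instruction. A sketch with a made-up secret:

	const secret = { name: 'GREETING', value: 'hello world' };
	// before: `ARG GREETING=hello world`   -> only 'hello' survives parsing
	// after:  `ARG GREETING='hello world'` -> the whole value survives
	const line = `ARG ${secret.name}='${secret.value}'`;
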
@@ -718,15 +828,15 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
 		secrets.forEach((secret) => {
 			if (secret.isBuildSecret) {
 				if (pullmergeRequestId) {
-					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+					const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
 					if (isSecretFound.length > 0) {
-						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+						Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 					} else {
-						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+						Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 					}
 				} else {
 					if (!secret.isPRMRSecret) {
-						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+						Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 					}
 				}
 			}
@@ -740,11 +850,7 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
 }

 export async function buildCacheImageWithCargo(data, imageForBuild) {
-	const {
-		applicationId,
-		workdir,
-		buildId,
-	} = data;
+	const { applicationId, workdir, buildId } = data;

 	const Dockerfile: Array<string> = [];
 	Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
@@ -1,6 +1,6 @@
 import { promises as fs } from 'fs';
-import { defaultComposeConfiguration, executeDockerCmd } from '../common';
-import { buildImage, saveBuildLog } from './common';
+import { defaultComposeConfiguration, executeCommand } from '../common';
+import { saveBuildLog } from './common';
 import yaml from 'js-yaml';

 export default async function (data) {
@@ -16,43 +16,28 @@ export default async function (data) {
 		baseDirectory,
 		secrets,
 		pullmergeRequestId,
-		port,
-		dockerComposeConfiguration
-	} = data
-	const fileYml = `${workdir}${baseDirectory}/docker-compose.yml`;
-	const fileYaml = `${workdir}${baseDirectory}/docker-compose.yaml`;
-	let dockerComposeRaw = null;
-	let isYml = false;
-	try {
-		dockerComposeRaw = await fs.readFile(`${fileYml}`, 'utf8')
-		isYml = true
-	} catch (error) { }
-	try {
-		dockerComposeRaw = await fs.readFile(`${fileYaml}`, 'utf8')
-	} catch (error) { }
-
-	if (!dockerComposeRaw) {
-		throw ('docker-compose.yml or docker-compose.yaml are not found!');
-	}
-	const dockerComposeYaml = yaml.load(dockerComposeRaw)
+		dockerComposeConfiguration,
+		dockerComposeFileLocation
+	} = data;
+	const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
+	const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
+	const dockerComposeYaml = yaml.load(dockerComposeRaw);
 	if (!dockerComposeYaml.services) {
-		throw 'No Services found in docker-compose file.'
+		throw 'No Services found in docker-compose file.';
 	}
-	const envs = [
-		`PORT=${port}`
-	];
+	const envs = [];
 	if (secrets.length > 0) {
 		secrets.forEach((secret) => {
 			if (pullmergeRequestId) {
-				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+				const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
 				if (isSecretFound.length > 0) {
-					envs.push(`${secret.name}=${isSecretFound[0].value}`);
+					envs.push(`${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					envs.push(`${secret.name}=${secret.value}`);
+					envs.push(`${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					envs.push(`${secret.name}=${secret.value}`);
+					envs.push(`${secret.name}='${secret.value}'`);
 				}
 			}
 		});
@@ -64,37 +49,78 @@ export default async function (data) {
 	} catch (error) {
 		//
 	}
-	const composeVolumes = volumes.map((volume) => {
-		return {
-			[`${volume.split(':')[0]}`]: {
-				name: volume.split(':')[0]
-			}
-		};
-	});
-	let networks = {}
+	const composeVolumes = [];
+	if (volumes.length > 0) {
+		for (const volume of volumes) {
+			let [v, path] = volume.split(':');
+			composeVolumes[v] = {
+				name: v
+			};
+		}
+	}
+
+	let networks = {};
 	for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
-		value['container_name'] = `${applicationId}-${key}`
-		value['env_file'] = envFound ? [`${workdir}/.env`] : []
-		value['labels'] = labels
-		value['volumes'] = volumes
+		value['container_name'] = `${applicationId}-${key}`;
+		value['env_file'] = envFound ? [`${workdir}/.env`] : [];
+		value['labels'] = labels;
+		// TODO: If we support separated volume for each service, we need to add it here
+		if (value['volumes']?.length > 0) {
+			value['volumes'] = value['volumes'].map((volume) => {
+				let [v, path, permission] = volume.split(':');
+				if (!path) {
+					path = v;
+					v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
+				} else {
+					v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
+				}
+				composeVolumes[v] = {
+					name: v
+				};
+				return `${v}:${path}${permission ? ':' + permission : ''}`;
+			});
+		}
+		if (volumes.length > 0) {
+			for (const volume of volumes) {
+				value['volumes'].push(volume);
+			}
+		}
 		if (dockerComposeConfiguration[key].port) {
-			value['expose'] = [dockerComposeConfiguration[key].port]
+			value['expose'] = [dockerComposeConfiguration[key].port];
 		}
 		if (value['networks']?.length > 0) {
 			value['networks'].forEach((network) => {
 				networks[network] = {
 					name: network
-				}
-			})
+				};
+			});
 		}
-		value['networks'] = [...value['networks'] || '', network]
-		dockerComposeYaml.services[key] = { ...dockerComposeYaml.services[key], restart: defaultComposeConfiguration(network).restart, deploy: defaultComposeConfiguration(network).deploy }
+		value['networks'] = [...(value['networks'] || ''), network];
+		dockerComposeYaml.services[key] = {
+			...dockerComposeYaml.services[key],
+			restart: defaultComposeConfiguration(network).restart,
+			deploy: defaultComposeConfiguration(network).deploy
+		};
 	}
-	dockerComposeYaml['volumes'] = Object.assign({ ...dockerComposeYaml['volumes'] }, ...composeVolumes)
-	dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } })
-	await fs.writeFile(`${workdir}/docker-compose.${isYml ? 'yml' : 'yaml'}`, yaml.dump(dockerComposeYaml));
-	await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} pull` })
-	await saveBuildLog({ line: 'Pulling images from Compose file.', buildId, applicationId });
-	await executeDockerCmd({ debug, buildId, applicationId, dockerId, command: `docker compose --project-directory ${workdir} build --progress plain` })
-	await saveBuildLog({ line: 'Building images from Compose file.', buildId, applicationId });
+	if (Object.keys(composeVolumes).length > 0) {
+		dockerComposeYaml['volumes'] = { ...composeVolumes };
+	}
+	dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });
+	await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
+	await executeCommand({
+		debug,
+		buildId,
+		applicationId,
+		dockerId,
+		command: `docker compose --project-directory ${workdir} pull`
+	});
+	await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
+	await executeCommand({
+		debug,
+		buildId,
+		applicationId,
+		dockerId,
+		command: `docker compose --project-directory ${workdir} build --progress plain`
+	});
+	await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
 }
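
One concrete pass through the volume-rewriting loop above, with a made-up applicationId: a path-style mount such as ./data:/app/data becomes an application-scoped named volume, registered in composeVolumes so it also lands under the top-level volumes key.

	// Illustrative input; the transform mirrors the map() above.
	const applicationId = 'app1';
	let [v, path, permission] = './data:/app/data'.split(':');
	v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
	console.log(`${v}:${path}${permission ? ':' + permission : ''}`); // app1-data:/app/data
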
@@ -29,13 +29,13 @@ const createDockerfile = async (data, image): Promise<void> => {
 			if (pullmergeRequestId) {
 				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 				if (isSecretFound.length > 0) {
-					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+					Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			}
 		}
@@ -2,25 +2,16 @@ import { promises as fs } from 'fs';
 import { buildImage } from './common';

 export default async function (data) {
-	let {
-		applicationId,
-		debug,
-		tag,
-		workdir,
-		buildId,
-		baseDirectory,
-		secrets,
-		pullmergeRequestId,
-		dockerFileLocation
-	} = data
+	let { workdir, buildId, baseDirectory, secrets, pullmergeRequestId, dockerFileLocation } = data;
 	const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
 	data.workdir = `${workdir}${baseDirectory}`;
-	const DockerfileRaw = await fs.readFile(`${file}`, 'utf8')
-	const Dockerfile: Array<string> = DockerfileRaw
-		.toString()
-		.trim()
-		.split('\n');
-	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
+	const DockerfileRaw = await fs.readFile(`${file}`, 'utf8');
+	const Dockerfile: Array<string> = DockerfileRaw.toString().trim().split('\n');
+	Dockerfile.forEach((line, index) => {
+		if (line.startsWith('FROM')) {
+			Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
+		}
+	});
 	if (secrets.length > 0) {
 		secrets.forEach((secret) => {
 			if (secret.isBuildSecret) {
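
Splicing the label after every FROM (instead of pushing it once at the end) stamps each stage of a multi-stage build with the buildId, which presumably is what lets an in-flight build's intermediate containers be found and cleaned up by label. A sketch with a hypothetical two-stage Dockerfile:

	const buildId = 'b1';
	const Dockerfile = [
		'FROM node:18 as build',
		'RUN npm ci',
		'FROM nginx:alpine',
		'COPY --from=build /app/dist /usr/share/nginx/html'
	];
	Dockerfile.forEach((line, index) => {
		if (line.startsWith('FROM')) {
			Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
		}
	});
	// Both stages now carry `LABEL coolify.buildId=b1` right after their FROM line.
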
@@ -28,18 +19,15 @@ export default async function (data) {
 					(pullmergeRequestId && secret.isPRMRSecret) ||
 					(!pullmergeRequestId && !secret.isPRMRSecret)
 				) {
-					Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
-
 					Dockerfile.forEach((line, index) => {
 						if (line.startsWith('FROM')) {
-							Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
+							Dockerfile.splice(index + 1, 0, `ARG ${secret.name}='${secret.value}'`);
 						}
 					});
 				}
 			}
 		});
 	}
-	await fs.writeFile(`${workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
+	await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
 	await buildImage(data);
 }
@@ -1,17 +1,16 @@
-import { executeDockerCmd, prisma } from "../common"
+import { executeCommand } from "../common"
 import { saveBuildLog } from "./common";

 export default async function (data: any): Promise<void> {
 	const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
 	try {
-		await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
-		await executeDockerCmd({
+		await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
+		await executeCommand({
 			buildId,
 			debug,
 			dockerId,
 			command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
 		})
-		await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
 	} catch (error) {
 		throw error;
 	}
@@ -29,13 +29,13 @@ const createDockerfile = async (data, image): Promise<void> => {
 			if (pullmergeRequestId) {
 				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 				if (isSecretFound.length > 0) {
-					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+					Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			}
 		}
@@ -23,15 +23,15 @@ const createDockerfile = async (data, image): Promise<void> => {
 		secrets.forEach((secret) => {
 			if (secret.isBuildSecret) {
 				if (pullmergeRequestId) {
-					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
+					const isSecretFound = secrets.filter((s) => s.name === secret.name && s.isPRMRSecret);
 					if (isSecretFound.length > 0) {
-						Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+						Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 					} else {
-						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+						Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 					}
 				} else {
 					if (!secret.isPRMRSecret) {
-						Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+						Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 					}
 				}
 			}
@@ -29,13 +29,13 @@ const createDockerfile = async (data, image): Promise<void> => {
 			if (pullmergeRequestId) {
 				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 				if (isSecretFound.length > 0) {
-					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+					Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			}
 		}
@@ -18,13 +18,13 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
 			if (pullmergeRequestId) {
 				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 				if (isSecretFound.length > 0) {
-					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+					Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			}
 		}
@@ -23,13 +23,13 @@ const createDockerfile = async (data, image): Promise<void> => {
 			if (pullmergeRequestId) {
 				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 				if (isSecretFound.length > 0) {
-					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+					Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			}
 		}
@@ -1,6 +1,6 @@
 import { promises as fs } from 'fs';
 import TOML from '@iarna/toml';
-import { asyncExecShell } from '../common';
+import { executeCommand } from '../common';
 import { buildCacheImageWithCargo, buildImage } from './common';

 const createDockerfile = async (data, image, name): Promise<void> => {
@@ -28,7 +28,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
 export default async function (data) {
 	try {
 		const { workdir, baseImage, baseBuildImage } = data;
-		const { stdout: cargoToml } = await asyncExecShell(`cat ${workdir}/Cargo.toml`);
+		const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
 		const parsedToml: any = TOML.parse(cargoToml);
 		const name = parsedToml.package.name;
 		await buildCacheImageWithCargo(data, baseBuildImage);
@@ -18,7 +18,11 @@ const createDockerfile = async (data, image): Promise<void> => {
 	const Dockerfile: Array<string> = [];

 	Dockerfile.push(`FROM ${image}`);
+	if (baseImage?.includes('httpd')) {
+		Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
+	} else {
 	Dockerfile.push('WORKDIR /app');
+	}
 	Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
 	if (secrets.length > 0) {
 		secrets.forEach((secret) => {
@@ -26,13 +30,13 @@ const createDockerfile = async (data, image): Promise<void> => {
 			if (pullmergeRequestId) {
 				const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 				if (isSecretFound.length > 0) {
-					Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
+					Dockerfile.push(`ARG ${secret.name}='${isSecretFound[0].value}'`);
 				} else {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			} else {
 				if (!secret.isPRMRSecret) {
-					Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
+					Dockerfile.push(`ARG ${secret.name}='${secret.value}'`);
 				}
 			}
 		}
@@ -8,18 +8,21 @@ import type { Config } from 'unique-names-generator';
 import generator from 'generate-password';
 import crypto from 'crypto';
 import { promises as dns } from 'dns';
+import * as Sentry from '@sentry/node';
 import { PrismaClient } from '@prisma/client';
 import os from 'os';
 import sshConfig from 'ssh-config';
+import jsonwebtoken from 'jsonwebtoken';
 import { checkContainer, removeContainer } from './docker';
 import { day } from './dayjs';
-import { saveBuildLog } from './buildPacks/common';
+import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common';
 import { scheduler } from './scheduler';
+import type { ExecaChildProcess } from 'execa';

-export const version = '3.11.2';
+export const version = '3.12.2';
 export const isDev = process.env.NODE_ENV === 'development';
+export const sentryDSN =
+	'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216';
 const algorithm = 'aes-256-ctr';
 const customConfig: Config = {
 	dictionaries: [adjectives, colors, animals],
@@ -28,9 +31,6 @@ const customConfig: Config = {
 	length: 3
 };

-export const defaultProxyImage = `coolify-haproxy-alpine:latest`;
-export const defaultProxyImageTcp = `coolify-haproxy-tcp-alpine:latest`;
-export const defaultProxyImageHttp = `coolify-haproxy-http-alpine:latest`;
 export const defaultTraefikImage = `traefik:v2.8`;
 export function getAPIUrl() {
 	if (process.env.GITPOD_WORKSPACE_URL) {
@@ -65,7 +65,6 @@ const otherTraefikEndpoint = isDev
 	: 'http://coolify:3000/webhooks/traefik/other.json';

 export const uniqueName = (): string => uniqueNamesGenerator(customConfig);
-export const asyncExecShell = util.promisify(exec);
 export const asyncExecShellStream = async ({
 	debug,
 	buildId,
@@ -94,7 +93,7 @@ export const asyncExecShellStream = async ({
 					line: `${line.replace('\n', '')}`,
 					buildId,
 					applicationId
-				}
+				};
 				logs.push(log);
 				if (debug) {
 					await saveBuildLog(log);
@@ -111,7 +110,7 @@ export const asyncExecShellStream = async ({
 					line: `${line.replace('\n', '')}`,
 					buildId,
 					applicationId
-				}
+				};
 				logs.push(log);
 				if (debug) {
 					await saveBuildLog(log);
@@ -305,7 +304,7 @@ export async function isDomainConfigured({

 export async function getContainerUsage(dockerId: string, container: string): Promise<any> {
 	try {
-		const { stdout } = await executeDockerCmd({
+		const { stdout } = await executeCommand({
 			dockerId,
 			command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"`
 		});
@@ -395,7 +394,6 @@ export function generateTimestamp(): string {
 	return `${day().format('HH:mm:ss.SSS')}`;
 }

-
 export const supportedDatabaseTypesAndVersions = [
 	{
 		name: 'mongodb',
@@ -510,44 +508,20 @@ export async function createRemoteEngineConfiguration(id: string) {
 		remoteUser
 	} = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });
 	await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 });
-	// Needed for remote docker compose
-	// const { stdout: numberOfSSHAgentsRunning } = await asyncExecShell(
-	// 	`ps ax | grep [s]sh-agent | grep coolify-ssh-agent.pid | grep -v grep | wc -l`
-	// );
-	// if (numberOfSSHAgentsRunning !== '' && Number(numberOfSSHAgentsRunning.trim()) == 0) {
-	// 	try {
-	// 		await fs.stat(`/tmp/coolify-ssh-agent.pid`);
-	// 		await fs.rm(`/tmp/coolify-ssh-agent.pid`);
-	// 	} catch (error) { }
-	// 	await asyncExecShell(`eval $(ssh-agent -sa /tmp/coolify-ssh-agent.pid)`);
-	// }
-	// await asyncExecShell(`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh-add -q ${sshKeyFile}`);
-
-	// const { stdout: numberOfSSHTunnelsRunning } = await asyncExecShell(
-	// 	`ps ax | grep 'ssh -F /dev/null -o StrictHostKeyChecking no -fNL ${localPort}:localhost:${remotePort}' | grep -v grep | wc -l`
-	// );
-	// if (numberOfSSHTunnelsRunning !== '' && Number(numberOfSSHTunnelsRunning.trim()) == 0) {
-	// 	try {
-	// 		await asyncExecShell(
-	// 			`SSH_AUTH_SOCK=/tmp/coolify-ssh-agent.pid ssh -F /dev/null -o "StrictHostKeyChecking no" -fNL ${localPort}:localhost:${remotePort} ${remoteUser}@${remoteIpAddress}`
-	// 		);
-	// 	} catch (error) { }
-	// }
 	const config = sshConfig.parse('');
-	const Host = `${remoteIpAddress}-remote`
+	const Host = `${remoteIpAddress}-remote`;

 	try {
-		await asyncExecShell(`ssh-keygen -R ${Host}`);
-		await asyncExecShell(`ssh-keygen -R ${remoteIpAddress}`);
-		await asyncExecShell(`ssh-keygen -R localhost:${localPort}`);
+		await executeCommand({ command: `ssh-keygen -R ${Host}` });
+		await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
+		await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
 	} catch (error) {}

 	const found = config.find({ Host });
 	const foundIp = config.find({ Host: remoteIpAddress });

-	if (found) config.remove({ Host })
-	if (foundIp) config.remove({ Host: remoteIpAddress })
+	if (found) config.remove({ Host });
+	if (foundIp) config.remove({ Host: remoteIpAddress });

 	config.append({
 		Host,
@@ -568,22 +542,35 @@ export async function createRemoteEngineConfiguration(id: string) {
 	}
 	return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));
 }
-export async function executeSSHCmd({ dockerId, command }) {
-	const { execaCommand } = await import('execa')
-	let { remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
-	if (remoteEngine) {
-		await createRemoteEngineConfiguration(dockerId)
-	}
-	if (process.env.CODESANDBOX_HOST) {
-		if (command.startsWith('docker compose')) {
-			command = command.replace(/docker compose/gi, 'docker-compose')
-		}
-	}
-	return await execaCommand(`ssh ${remoteIpAddress}-remote ${command}`)
-}
-export async function executeDockerCmd({ debug, buildId, applicationId, dockerId, command }: { debug?: boolean, buildId?: string, applicationId?: string, dockerId: string, command: string }): Promise<any> {
-	const { execaCommand } = await import('execa')
-	let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({ where: { id: dockerId } })
+export async function executeCommand({
+	command,
+	dockerId = null,
+	sshCommand = false,
+	shell = false,
+	stream = false,
+	buildId,
+	applicationId,
+	debug
+}: {
+	command: string;
+	sshCommand?: boolean;
+	shell?: boolean;
+	stream?: boolean;
+	dockerId?: string;
+	buildId?: string;
+	applicationId?: string;
+	debug?: boolean;
+}): Promise<ExecaChildProcess<string>> {
+	const { execa, execaCommand } = await import('execa');
+	const { parse } = await import('shell-quote');
+	const parsedCommand = parse(command);
+	const dockerCommand = parsedCommand[0];
+	const dockerArgs = parsedCommand.slice(1);
+
+	if (dockerId) {
+		let { remoteEngine, remoteIpAddress, engine } = await prisma.destinationDocker.findUnique({
+			where: { id: dockerId }
+		});
 		if (remoteEngine) {
 			await createRemoteEngineConfiguration(dockerId);
 			engine = `ssh://${remoteIpAddress}-remote`;
@@ -595,29 +582,114 @@ export async function executeDockerCmd({ debug, buildId, applicationId, dockerId
 				command = command.replace(/docker compose/gi, 'docker-compose');
 			}
 		}
-	if (command.startsWith(`docker build`) || command.startsWith(`pack build`) || command.startsWith(`docker compose build`)) {
-		return await asyncExecShellStream({ debug, buildId, applicationId, command, engine });
-	}
-	return await execaCommand(command, { env: { DOCKER_BUILDKIT: "1", DOCKER_HOST: engine }, shell: true })
+		if (sshCommand) {
+			if (shell) {
+				return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`);
+			}
+			return await execa('ssh', [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs]);
+		}
+		if (stream) {
+			return await new Promise(async (resolve, reject) => {
+				let subprocess = null;
+				if (shell) {
+					subprocess = execaCommand(command, {
+						env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
+					});
+				} else {
+					subprocess = execa(dockerCommand, dockerArgs, {
+						env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
+					});
+				}
+				const logs = [];
+				subprocess.stdout.on('data', async (data) => {
+					const stdout = data.toString();
+					const array = stdout.split('\n');
+					for (const line of array) {
+						if (line !== '\n' && line !== '') {
+							const log = {
+								line: `${line.replace('\n', '')}`,
+								buildId,
+								applicationId
+							};
+							logs.push(log);
+							if (debug) {
+								await saveBuildLog(log);
+							}
+						}
+					}
+				});
+				subprocess.stderr.on('data', async (data) => {
+					const stderr = data.toString();
+					const array = stderr.split('\n');
+					for (const line of array) {
+						if (line !== '\n' && line !== '') {
+							const log = {
+								line: `${line.replace('\n', '')}`,
+								buildId,
+								applicationId
+							};
+							logs.push(log);
+							if (debug) {
+								await saveBuildLog(log);
+							}
+						}
+					}
+				});
+				subprocess.on('exit', async (code) => {
+					if (code === 0) {
+						resolve(code);
+					} else {
+						if (!debug) {
+							for (const log of logs) {
+								await saveBuildLog(log);
+							}
+						}
+						reject(code);
+					}
+				});
+			});
+		} else {
+			if (shell) {
+				return await execaCommand(command, {
+					env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
+				});
+			} else {
+				return await execa(dockerCommand, dockerArgs, {
+					env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
+				});
+			}
+		}
+	} else {
+		if (shell) {
+			return execaCommand(command, { shell: true });
+		}
+		return await execa(dockerCommand, dockerArgs);
+	}
 }

 export async function startTraefikProxy(id: string): Promise<void> {
-	const { engine, network, remoteEngine, remoteIpAddress } = await prisma.destinationDocker.findUnique({ where: { id } })
-	const { found } = await checkContainer({ dockerId: id, container: 'coolify-proxy', remove: true });
+	const { engine, network, remoteEngine, remoteIpAddress } =
+		await prisma.destinationDocker.findUnique({ where: { id } });
+	const { found } = await checkContainer({
+		dockerId: id,
+		container: 'coolify-proxy',
+		remove: true
+	});
 	const { id: settingsId, ipv4, ipv6 } = await listSettings();

 	if (!found) {
-		const { stdout: coolifyNetwork } = await executeDockerCmd({
+		const { stdout: coolifyNetwork } = await executeCommand({
 			dockerId: id,
 			command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`
 		});

 		if (!coolifyNetwork) {
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId: id,
 				command: `docker network create --attachable coolify-infra`
 			});
 		}
-		const { stdout: Config } = await executeDockerCmd({
+		const { stdout: Config } = await executeCommand({
 			dockerId: id,
 			command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
 		});
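
Every executeDockerCmd / asyncExecShell call site in this diff now funnels into the single executeCommand above. A sketch of the main call shapes, with made-up ids (only the option names come from the signature in the diff):

	// 1. Plain local command: parsed by shell-quote and run via execa.
	const { stdout } = await executeCommand({ command: 'uname -m' });

	// 2. Streaming build against a destination: stdout/stderr chunks become
	//    build-log lines; on a non-zero exit, buffered lines are flushed first.
	await executeCommand({
		stream: true,
		debug: true,
		buildId: 'build-1',
		applicationId: 'app-1',
		dockerId: 'destination-1',
		command: 'docker build --progress plain -t app-1:latest /tmp/wd'
	});

	// 3. shell: true keeps the raw string for '&&' chains and pipes, which the
	//    shell-quote argv split cannot express as a single execa invocation.
	await executeCommand({
		dockerId: 'destination-1',
		shell: true,
		command: 'docker stop -t 0 coolify-proxy && docker rm coolify-proxy'
	});
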
@@ -632,7 +704,7 @@ export async function startTraefikProxy(id: string): Promise<void> {
 		}
 		traefikUrl = `${ip}/webhooks/traefik/remote/${id}`;
 	}
-	await executeDockerCmd({
+	await executeCommand({
 		dockerId: id,
 		command: `docker run --restart always \
 		--add-host 'host.docker.internal:host-gateway' \
@@ -657,7 +729,6 @@ export async function startTraefikProxy(id: string): Promise<void> {
 		--certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \
 		--log.level=error`
 		});
-		await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } });
 		await prisma.destinationDocker.update({
 			where: { id },
 			data: { isCoolifyProxyUsed: true }
@@ -681,13 +752,13 @@ export async function startTraefikProxy(id: string): Promise<void> {

 export async function configureNetworkTraefikProxy(destination: any): Promise<void> {
 	const { id } = destination;
-	const { stdout: networks } = await executeDockerCmd({
+	const { stdout: networks } = await executeCommand({
 		dockerId: id,
 		command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'`
 	});
 	const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
 	if (!configuredNetworks.includes(destination.network)) {
-		await executeDockerCmd({
+		await executeCommand({
 			dockerId: destination.id,
 			command: `docker network connect ${destination.network} coolify-proxy`
 		});
@@ -702,13 +773,12 @@ export async function stopTraefikProxy(
 		where: { id },
 		data: { isCoolifyProxyUsed: false }
 	});
-	const { id: settingsId } = await prisma.setting.findFirst({});
-	await prisma.setting.update({ where: { id: settingsId }, data: { proxyHash: null } });
 	try {
 		if (found) {
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId: id,
-				command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`
+				command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`,
+				shell: true
 			});
 		}
 	} catch (error) {
@@ -717,11 +787,17 @@ export async function stopTraefikProxy(
 }

 export async function listSettings(): Promise<any> {
-	const settings = await prisma.setting.findFirst({});
-	if (settings.proxyPassword) settings.proxyPassword = decrypt(settings.proxyPassword);
-	return settings;
+	return await prisma.setting.findUnique({ where: { id: '0' } });
 }

+export function generateToken() {
+	return jsonwebtoken.sign(
+		{
+			nbf: Math.floor(Date.now() / 1000) - 30
+		},
+		process.env['COOLIFY_SECRET_KEY']
+	);
+}
 export function generatePassword({
 	length = 24,
 	symbols = false,
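
generateToken signs a minimal payload (just an nbf claim backdated 30 seconds, which tolerates small clock skew between services) with the instance-wide secret. A sketch of the matching verification, assuming the consumer shares COOLIFY_SECRET_KEY:

	import jsonwebtoken from 'jsonwebtoken';

	// Sketch only: verify a token minted by generateToken().
	function verifyToken(token: string) {
		return jsonwebtoken.verify(token, process.env['COOLIFY_SECRET_KEY']);
	}
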
@@ -740,7 +816,8 @@ export function generatePassword({
 	return password;
 }

-type DatabaseConfiguration = {
+type DatabaseConfiguration =
+	| {
 	volume: string;
 	image: string;
 	command?: string;
@@ -831,18 +908,10 @@ type DatabaseConfiguration = {
 			EDGEDB_SERVER_DATABASE: string;
 			EDGEDB_SERVER_TLS_CERT_MODE: string;
 		};
-}
+	  };
 export function generateDatabaseConfiguration(database: any, arch: string): DatabaseConfiguration {
-	const {
-		id,
-		dbUser,
-		dbUserPassword,
-		rootUser,
-		rootUserPassword,
-		defaultDatabase,
-		version,
-		type,
-	} = database;
+	const { id, dbUser, dbUserPassword, rootUser, rootUserPassword, defaultDatabase, version, type } =
+		database;
 	const baseImage = getDatabaseImage(type, arch);
 	if (type === 'mysql') {
 		const configuration = {
@@ -922,7 +991,9 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data
 		}
 		return configuration;
 	} else if (type === 'redis') {
-		const { settings: { appendOnly } } = database;
+		const {
+			settings: { appendOnly }
+		} = database;
 		const configuration: DatabaseConfiguration = {
 			privatePort: 6379,
 			command: undefined,
@@ -936,7 +1007,8 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data
 		};
 		if (isARM(arch)) {
 			configuration.volume = `${id}-${type}-data:/data`;
-			configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? 'yes' : 'no'
+			configuration.command = `/usr/local/bin/redis-server --appendonly ${
+				appendOnly ? 'yes' : 'no'
 			} --requirepass ${dbUserPassword}`;
 		}
 		return configuration;
@@ -954,7 +1026,7 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data
 		if (isARM(arch)) {
 			configuration.volume = `${id}-${type}-data:/opt/couchdb/data`;
 		}
-		return configuration
+		return configuration;
 	} else if (type === 'edgedb') {
 		const configuration: DatabaseConfiguration = {
 			privatePort: 5656,
@@ -968,11 +1040,11 @@ export function generateDatabaseConfiguration(database: any, arch: string): Data
 			volume: `${id}-${type}-data:/var/lib/edgedb/data`,
 			ulimits: {}
 		};
-		return configuration
+		return configuration;
 	}
 }
 export function isARM(arch: string) {
-	if (arch === 'arm' || arch === 'arm64') {
+	if (arch === 'arm' || arch === 'arm64' || arch === 'aarch' || arch === 'aarch64') {
 		return true;
 	}
 	return false;
@@ -1073,6 +1145,7 @@ export async function makeLabelForStandaloneDatabase({ id, image, volume }) {
 		'coolify.managed=true',
 		`coolify.version=${version}`,
 		`coolify.type=standalone-database`,
+		`coolify.name=${database.name}`,
 		`coolify.configuration=${base64Encode(
 			JSON.stringify({
 				version,
@@ -1090,7 +1163,7 @@ export const createDirectories = async ({
 	repository: string;
 	buildId: string;
 }): Promise<{ workdir: string; repodir: string }> => {
-	repository = repository.replaceAll(' ', '')
+	if (repository) repository = repository.replaceAll(' ', '');
 	const repodir = `/tmp/build-sources/${repository}/`;
 	const workdir = `/tmp/build-sources/${repository}/${buildId}`;
 	let workdirFound = false;
@@ -1098,9 +1171,9 @@ export const createDirectories = async ({
 		workdirFound = !!(await fs.stat(workdir));
 	} catch (error) {}
 	if (workdirFound) {
-		await asyncExecShell(`rm -fr ${workdir}`);
+		await executeCommand({ command: `rm -fr ${workdir}` });
 	}
-	await asyncExecShell(`mkdir -p ${workdir}`);
+	await executeCommand({ command: `mkdir -p ${workdir}` });
 	return {
 		workdir,
 		repodir
@@ -1116,7 +1189,7 @@ export async function stopDatabaseContainer(database: any): Promise<boolean> {
 	} = database;
 	if (destinationDockerId) {
 		try {
-			const { stdout } = await executeDockerCmd({
+			const { stdout } = await executeCommand({
 				dockerId,
 				command: `docker inspect --format '{{json .State}}' ${id}`
 			});
@@ -1144,9 +1217,10 @@ export async function stopTcpHttpProxy(
 	const { found } = await checkContainer({ dockerId, container });
 	try {
 		if (found) {
-			return await executeDockerCmd({
+			return await executeCommand({
 				dockerId,
-				command: `docker stop -t 0 ${container} && docker rm ${container}`
+				command: `docker stop -t 0 ${container} && docker rm ${container}`,
+				shell: true
 			});
 		}
 	} catch (error) {
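
The shell: true added here (and for coolify-proxy above) exists because these commands chain two docker invocations with '&&'. Without a shell, executeCommand splits the string into argv for a single process and the operator cannot be expressed. A sketch of what shell-quote returns for such a string:

	import { parse } from 'shell-quote';
	console.log(parse('docker stop -t 0 c1 && docker rm c1'));
	// ['docker', 'stop', '-t', '0', 'c1', { op: '&&' }, 'docker', 'rm', 'c1']
	// execa('docker', args) has no way to run the second command; a shell does.
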
@@ -1168,53 +1242,79 @@ export async function updatePasswordInDb(database, user, newPassword, isRoot) {
 	} = database;
 	if (destinationDockerId) {
 		if (type === 'mysql') {
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId,
 				command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"ALTER USER '${user}'@'%' IDENTIFIED WITH caching_sha2_password BY '${newPassword}';\"`
 			});
 		} else if (type === 'mariadb') {
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId,
 				command: `docker exec ${id} mysql -u ${rootUser} -p${rootUserPassword} -e \"SET PASSWORD FOR '${user}'@'%' = PASSWORD('${newPassword}');\"`
 			});
 		} else if (type === 'postgresql') {
 			if (isRoot) {
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId,
 					command: `docker exec ${id} psql postgresql://postgres:${rootUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role postgres WITH PASSWORD '${newPassword}'"`
 				});
 			} else {
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId,
 					command: `docker exec ${id} psql postgresql://${dbUser}:${dbUserPassword}@${id}:5432/${defaultDatabase} -c "ALTER role ${user} WITH PASSWORD '${newPassword}'"`
 				});
 			}
 		} else if (type === 'mongodb') {
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId,
 				command: `docker exec ${id} mongo 'mongodb://${rootUser}:${rootUserPassword}@${id}:27017/admin?readPreference=primary&ssl=false' --eval "db.changeUserPassword('${user}','${newPassword}')"`
 			});
 		} else if (type === 'redis') {
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId,
 				command: `docker exec ${id} redis-cli -u redis://${dbUserPassword}@${id}:6379 --raw CONFIG SET requirepass ${newPassword}`
 			});
 		}
 	}
 }
-export async function checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress }: { id: string, configuredPort?: number, exposePort: number, engine: string, remoteEngine: boolean, remoteIpAddress?: string }) {
+export async function checkExposedPort({
+	id,
+	configuredPort,
+	exposePort,
+	engine,
+	remoteEngine,
+	remoteIpAddress
+}: {
+	id: string;
+	configuredPort?: number;
+	exposePort: number;
+	engine: string;
+	remoteEngine: boolean;
+	remoteIpAddress?: string;
+}) {
 	if (exposePort < 1024 || exposePort > 65535) {
 		throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` };
 	}
 	if (configuredPort) {
 		if (configuredPort !== exposePort) {
-			const availablePort = await getFreeExposedPort(id, exposePort, engine, remoteEngine, remoteIpAddress);
+			const availablePort = await getFreeExposedPort(
+				id,
+				exposePort,
+				engine,
+				remoteEngine,
+				remoteIpAddress
+			);
 			if (availablePort.toString() !== exposePort.toString()) {
 				throw { status: 500, message: `Port ${exposePort} is already in use.` };
 			}
 		}
 	} else {
-		const availablePort = await getFreeExposedPort(id, exposePort, engine, remoteEngine, remoteIpAddress);
|
const availablePort = await getFreeExposedPort(
|
||||||
|
id,
|
||||||
|
exposePort,
|
||||||
|
engine,
|
||||||
|
remoteEngine,
|
||||||
|
remoteIpAddress
|
||||||
|
);
|
||||||
if (availablePort.toString() !== exposePort.toString()) {
|
if (availablePort.toString() !== exposePort.toString()) {
|
||||||
throw { status: 500, message: `Port ${exposePort} is already in use.` };
|
throw { status: 500, message: `Port ${exposePort} is already in use.` };
|
||||||
}
|
}
|
||||||
@@ -1225,25 +1325,33 @@ export async function getFreeExposedPort(id, exposePort, engine, remoteEngine, r
|
|||||||
if (remoteEngine) {
|
if (remoteEngine) {
|
||||||
const applicationUsed = await (
|
const applicationUsed = await (
|
||||||
await prisma.application.findMany({
|
await prisma.application.findMany({
|
||||||
where: { exposePort: { not: null }, id: { not: id }, destinationDocker: { remoteIpAddress } },
|
where: {
|
||||||
|
exposePort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
destinationDocker: { remoteIpAddress }
|
||||||
|
},
|
||||||
select: { exposePort: true }
|
select: { exposePort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.exposePort);
|
).map((a) => a.exposePort);
|
||||||
const serviceUsed = await (
|
const serviceUsed = await (
|
||||||
await prisma.service.findMany({
|
await prisma.service.findMany({
|
||||||
where: { exposePort: { not: null }, id: { not: id }, destinationDocker: { remoteIpAddress } },
|
where: {
|
||||||
|
exposePort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
destinationDocker: { remoteIpAddress }
|
||||||
|
},
|
||||||
select: { exposePort: true }
|
select: { exposePort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.exposePort);
|
).map((a) => a.exposePort);
|
||||||
const usedPorts = [...applicationUsed, ...serviceUsed];
|
const usedPorts = [...applicationUsed, ...serviceUsed];
|
||||||
if (usedPorts.includes(exposePort)) {
|
if (usedPorts.includes(exposePort)) {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
const found = await checkPort(exposePort, { host: remoteIpAddress });
|
const found = await checkPort(exposePort, { host: remoteIpAddress });
|
||||||
if (!found) {
|
if (!found) {
|
||||||
return exposePort
|
return exposePort;
|
||||||
}
|
}
|
||||||
return false
|
return false;
|
||||||
} else {
|
} else {
|
||||||
const applicationUsed = await (
|
const applicationUsed = await (
|
||||||
await prisma.application.findMany({
|
await prisma.application.findMany({
|
||||||
@@ -1259,13 +1367,13 @@ export async function getFreeExposedPort(id, exposePort, engine, remoteEngine, r
|
|||||||
).map((a) => a.exposePort);
|
).map((a) => a.exposePort);
|
||||||
const usedPorts = [...applicationUsed, ...serviceUsed];
|
const usedPorts = [...applicationUsed, ...serviceUsed];
|
||||||
if (usedPorts.includes(exposePort)) {
|
if (usedPorts.includes(exposePort)) {
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
const found = await checkPort(exposePort, { host: 'localhost' });
|
const found = await checkPort(exposePort, { host: 'localhost' });
|
||||||
if (!found) {
|
if (!found) {
|
||||||
return exposePort
|
return exposePort;
|
||||||
}
|
}
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
export function generateRangeArray(start, end) {
|
export function generateRangeArray(start, end) {
|
||||||
@@ -1278,38 +1386,54 @@ export async function getFreePublicPort({ id, remoteEngine, engine, remoteIpAddr
|
|||||||
if (remoteEngine) {
|
if (remoteEngine) {
|
||||||
const dbUsed = await (
|
const dbUsed = await (
|
||||||
await prisma.database.findMany({
|
await prisma.database.findMany({
|
||||||
where: { publicPort: { not: null }, id: { not: id }, destinationDocker: { remoteIpAddress } },
|
where: {
|
||||||
|
publicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
destinationDocker: { remoteIpAddress }
|
||||||
|
},
|
||||||
select: { publicPort: true }
|
select: { publicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.publicPort);
|
).map((a) => a.publicPort);
|
||||||
const wpFtpUsed = await (
|
const wpFtpUsed = await (
|
||||||
await prisma.wordpress.findMany({
|
await prisma.wordpress.findMany({
|
||||||
where: { ftpPublicPort: { not: null }, id: { not: id }, service: { destinationDocker: { remoteIpAddress } } },
|
where: {
|
||||||
|
ftpPublicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
service: { destinationDocker: { remoteIpAddress } }
|
||||||
|
},
|
||||||
select: { ftpPublicPort: true }
|
select: { ftpPublicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.ftpPublicPort);
|
).map((a) => a.ftpPublicPort);
|
||||||
const wpUsed = await (
|
const wpUsed = await (
|
||||||
await prisma.wordpress.findMany({
|
await prisma.wordpress.findMany({
|
||||||
where: { mysqlPublicPort: { not: null }, id: { not: id }, service: { destinationDocker: { remoteIpAddress } } },
|
where: {
|
||||||
|
mysqlPublicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
service: { destinationDocker: { remoteIpAddress } }
|
||||||
|
},
|
||||||
select: { mysqlPublicPort: true }
|
select: { mysqlPublicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.mysqlPublicPort);
|
).map((a) => a.mysqlPublicPort);
|
||||||
const minioUsed = await (
|
const minioUsed = await (
|
||||||
await prisma.minio.findMany({
|
await prisma.minio.findMany({
|
||||||
where: { publicPort: { not: null }, id: { not: id }, service: { destinationDocker: { remoteIpAddress } } },
|
where: {
|
||||||
|
publicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
service: { destinationDocker: { remoteIpAddress } }
|
||||||
|
},
|
||||||
select: { publicPort: true }
|
select: { publicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.publicPort);
|
).map((a) => a.publicPort);
|
||||||
const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed, ...minioUsed];
|
const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed, ...minioUsed];
|
||||||
const range = generateRangeArray(minPort, maxPort)
|
const range = generateRangeArray(minPort, maxPort);
|
||||||
const availablePorts = range.filter(port => !usedPorts.includes(port))
|
const availablePorts = range.filter((port) => !usedPorts.includes(port));
|
||||||
for (const port of availablePorts) {
|
for (const port of availablePorts) {
|
||||||
const found = await isReachable(port, { host: remoteIpAddress })
|
const found = await isReachable(port, { host: remoteIpAddress });
|
||||||
if (!found) {
|
if (!found) {
|
||||||
return port
|
return port;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false
|
return false;
|
||||||
} else {
|
} else {
|
||||||
const dbUsed = await (
|
const dbUsed = await (
|
||||||
await prisma.database.findMany({
|
await prisma.database.findMany({
|
||||||
@@ -1319,32 +1443,44 @@ export async function getFreePublicPort({ id, remoteEngine, engine, remoteIpAddr
|
|||||||
).map((a) => a.publicPort);
|
).map((a) => a.publicPort);
|
||||||
const wpFtpUsed = await (
|
const wpFtpUsed = await (
|
||||||
await prisma.wordpress.findMany({
|
await prisma.wordpress.findMany({
|
||||||
where: { ftpPublicPort: { not: null }, id: { not: id }, service: { destinationDocker: { engine } } },
|
where: {
|
||||||
|
ftpPublicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
service: { destinationDocker: { engine } }
|
||||||
|
},
|
||||||
select: { ftpPublicPort: true }
|
select: { ftpPublicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.ftpPublicPort);
|
).map((a) => a.ftpPublicPort);
|
||||||
const wpUsed = await (
|
const wpUsed = await (
|
||||||
await prisma.wordpress.findMany({
|
await prisma.wordpress.findMany({
|
||||||
where: { mysqlPublicPort: { not: null }, id: { not: id }, service: { destinationDocker: { engine } } },
|
where: {
|
||||||
|
mysqlPublicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
service: { destinationDocker: { engine } }
|
||||||
|
},
|
||||||
select: { mysqlPublicPort: true }
|
select: { mysqlPublicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.mysqlPublicPort);
|
).map((a) => a.mysqlPublicPort);
|
||||||
const minioUsed = await (
|
const minioUsed = await (
|
||||||
await prisma.minio.findMany({
|
await prisma.minio.findMany({
|
||||||
where: { publicPort: { not: null }, id: { not: id }, service: { destinationDocker: { engine } } },
|
where: {
|
||||||
|
publicPort: { not: null },
|
||||||
|
id: { not: id },
|
||||||
|
service: { destinationDocker: { engine } }
|
||||||
|
},
|
||||||
select: { publicPort: true }
|
select: { publicPort: true }
|
||||||
})
|
})
|
||||||
).map((a) => a.publicPort);
|
).map((a) => a.publicPort);
|
||||||
const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed, ...minioUsed];
|
const usedPorts = [...dbUsed, ...wpFtpUsed, ...wpUsed, ...minioUsed];
|
||||||
const range = generateRangeArray(minPort, maxPort)
|
const range = generateRangeArray(minPort, maxPort);
|
||||||
const availablePorts = range.filter(port => !usedPorts.includes(port))
|
const availablePorts = range.filter((port) => !usedPorts.includes(port));
|
||||||
for (const port of availablePorts) {
|
for (const port of availablePorts) {
|
||||||
const found = await isReachable(port, { host: 'localhost' })
|
const found = await isReachable(port, { host: 'localhost' });
|
||||||
if (!found) {
|
if (!found) {
|
||||||
return port
|
return port;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1369,7 +1505,7 @@ export async function startTraefikTCPProxy(
|
|||||||
});
|
});
|
||||||
try {
|
try {
|
||||||
if (foundDependentContainer && !found) {
|
if (foundDependentContainer && !found) {
|
||||||
const { stdout: Config } = await executeDockerCmd({
|
const { stdout: Config } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||||
});
|
});
|
||||||
@@ -1416,16 +1552,17 @@ export async function startTraefikTCPProxy(
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
await fs.writeFile(`/tmp/docker-compose-${id}.yaml`, yaml.dump(tcpProxy));
|
await fs.writeFile(`/tmp/docker-compose-${id}.yaml`, yaml.dump(tcpProxy));
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
|
command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
|
||||||
});
|
});
|
||||||
await fs.rm(`/tmp/docker-compose-${id}.yaml`);
|
await fs.rm(`/tmp/docker-compose-${id}.yaml`);
|
||||||
}
|
}
|
||||||
if (!foundDependentContainer && found) {
|
if (!foundDependentContainer && found) {
|
||||||
await executeDockerCmd({
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker stop -t 0 ${container} && docker rm ${container}`
|
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||||
|
shell: true
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -1449,11 +1586,11 @@ export async function getServiceFromDB({
|
|||||||
serviceSecret: true,
|
serviceSecret: true,
|
||||||
serviceSetting: true,
|
serviceSetting: true,
|
||||||
wordpress: true,
|
wordpress: true,
|
||||||
plausibleAnalytics: true,
|
plausibleAnalytics: true
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
if (!body) {
|
if (!body) {
|
||||||
return null
|
return null;
|
||||||
}
|
}
|
||||||
// body.type = fixType(body.type);
|
// body.type = fixType(body.type);
|
||||||
|
|
||||||
@@ -1470,7 +1607,6 @@ export async function getServiceFromDB({
|
|||||||
return { ...body, settings };
|
return { ...body, settings };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
export function fixType(type) {
|
export function fixType(type) {
|
||||||
return type?.replaceAll(' ', '').toLowerCase() || null;
|
return type?.replaceAll(' ', '').toLowerCase() || null;
|
||||||
}
|
}
|
||||||
@@ -1485,12 +1621,17 @@ export function makeLabelForServices(type) {
|
|||||||
}
|
}
|
||||||
export function errorHandler({
|
export function errorHandler({
|
||||||
status = 500,
|
status = 500,
|
||||||
message = 'Unknown error.'
|
message = 'Unknown error.',
|
||||||
|
type = 'normal'
|
||||||
}: {
|
}: {
|
||||||
status: number;
|
status: number;
|
||||||
message: string | any;
|
message: string | any;
|
||||||
|
type?: string | null;
|
||||||
}) {
|
}) {
|
||||||
if (message.message) message = message.message;
|
if (message.message) message = message.message;
|
||||||
|
if (type === 'normal') {
|
||||||
|
Sentry.captureException(message);
|
||||||
|
}
|
||||||
throw { status, message };
|
throw { status, message };
|
||||||
}
|
}
|
||||||
export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> {
|
export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> {
|
||||||
@@ -1529,9 +1670,9 @@ export async function stopBuild(buildId, applicationId) {
|
|||||||
scheduler.workers.get('deployApplication').postMessage('cancel');
|
scheduler.workers.get('deployApplication').postMessage('cancel');
|
||||||
}
|
}
|
||||||
await cleanupDB(buildId, applicationId);
|
await cleanupDB(buildId, applicationId);
|
||||||
return reject(new Error('Deployment canceled.'));
|
return reject(new Error('Canceled.'));
|
||||||
}
|
}
|
||||||
const { stdout: buildContainers } = await executeDockerCmd({
|
const { stdout: buildContainers } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'`
|
command: `docker container ls --filter "label=coolify.buildId=${buildId}" --format '{{json .}}'`
|
||||||
});
|
});
|
||||||
@@ -1562,7 +1703,7 @@ async function cleanupDB(buildId: string, applicationId: string) {
|
|||||||
if (data?.status === 'queued' || data?.status === 'running') {
|
if (data?.status === 'queued' || data?.status === 'running') {
|
||||||
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
|
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
|
||||||
}
|
}
|
||||||
await saveBuildLog({ line: 'Deployment canceled.', buildId, applicationId });
|
await saveBuildLog({ line: 'Canceled.', buildId, applicationId });
|
||||||
}
|
}
|
||||||
|
|
||||||
export function convertTolOldVolumeNames(type) {
|
export function convertTolOldVolumeNames(type) {
|
||||||
@@ -1574,36 +1715,65 @@ export function convertTolOldVolumeNames(type) {
|
|||||||
export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
|
export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
|
||||||
// Cleanup old coolify images
|
// Cleanup old coolify images
|
||||||
try {
|
try {
|
||||||
let { stdout: images } = await executeDockerCmd({
|
let { stdout: images } = await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`
|
command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`,
|
||||||
|
shell: true
|
||||||
});
|
});
|
||||||
|
|
||||||
images = images.trim();
|
images = images.trim();
|
||||||
if (images) {
|
if (images) {
|
||||||
await executeDockerCmd({ dockerId, command: `docker rmi -f ${images}" -q | xargs -r` });
|
await executeCommand({
|
||||||
|
dockerId,
|
||||||
|
command: `docker rmi -f ${images}" -q | xargs -r`,
|
||||||
|
shell: true
|
||||||
|
});
|
||||||
}
|
}
|
||||||
} catch (error) {}
|
} catch (error) {}
|
||||||
if (lowDiskSpace || force) {
|
if (lowDiskSpace || force) {
|
||||||
// if (isDev) {
|
// Cleanup images that are not used
|
||||||
// if (!force) console.log(`[DEV MODE] Low disk space: ${lowDiskSpace}`);
|
|
||||||
// return;
|
|
||||||
// }
|
|
||||||
try {
|
try {
|
||||||
await executeDockerCmd({
|
await executeCommand({ dockerId, command: `docker image prune -f` });
|
||||||
|
} catch (error) {}
|
||||||
|
|
||||||
|
const { numberOfDockerImagesKeptLocally } = await prisma.setting.findUnique({
|
||||||
|
where: { id: '0' }
|
||||||
|
});
|
||||||
|
const { stdout: images } = await executeCommand({
|
||||||
|
dockerId,
|
||||||
|
command: `docker images|grep -v "<none>"|grep -v REPOSITORY|awk '{print $1, $2}'`,
|
||||||
|
shell: true
|
||||||
|
});
|
||||||
|
const imagesArray = images.trim().replaceAll(' ', ':').split('\n');
|
||||||
|
const imagesSet = new Set(imagesArray.map((image) => image.split(':')[0]));
|
||||||
|
let deleteImage = [];
|
||||||
|
for (const image of imagesSet) {
|
||||||
|
let keepImage = [];
|
||||||
|
for (const image2 of imagesArray) {
|
||||||
|
if (image2.startsWith(image)) {
|
||||||
|
if (keepImage.length >= numberOfDockerImagesKeptLocally) {
|
||||||
|
deleteImage.push(image2);
|
||||||
|
} else {
|
||||||
|
keepImage.push(image2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const image of deleteImage) {
|
||||||
|
await executeCommand({ dockerId, command: `docker image rm -f ${image}` });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prune coolify managed containers
|
||||||
|
try {
|
||||||
|
await executeCommand({
|
||||||
dockerId,
|
dockerId,
|
||||||
command: `docker container prune -f --filter "label=coolify.managed=true"`
|
command: `docker container prune -f --filter "label=coolify.managed=true"`
|
||||||
});
|
});
|
||||||
} catch (error) {}
|
} catch (error) {}
|
||||||
try {
|
|
||||||
await executeDockerCmd({ dockerId, command: `docker image prune -f` });
|
|
||||||
} catch (error) { }
|
|
||||||
try {
|
|
||||||
await executeDockerCmd({ dockerId, command: `docker image prune -a -f` });
|
|
||||||
} catch (error) { }
|
|
||||||
// Cleanup build caches
|
// Cleanup build caches
|
||||||
try {
|
try {
|
||||||
await executeDockerCmd({ dockerId, command: `docker builder prune -a -f` });
|
await executeCommand({ dockerId, command: `docker builder prune -a -f` });
|
||||||
} catch (error) {}
|
} catch (error) {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1614,7 +1784,7 @@ export function persistentVolumes(id, persistentStorage, config) {
|
|||||||
for (const [key, value] of Object.entries(config)) {
|
for (const [key, value] of Object.entries(config)) {
|
||||||
if (value.volumes) {
|
if (value.volumes) {
|
||||||
for (const volume of value.volumes) {
|
for (const volume of value.volumes) {
|
||||||
if (!volume.startsWith('/var/run/docker.sock')) {
|
if (!volume.startsWith('/')) {
|
||||||
volumeSet.add(volume);
|
volumeSet.add(volume);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1685,3 +1855,23 @@ export function decryptApplication(application: any) {
|
|||||||
return application;
|
return application;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function pushToRegistry(
|
||||||
|
application: any,
|
||||||
|
workdir: string,
|
||||||
|
tag: string,
|
||||||
|
imageName: string,
|
||||||
|
customTag: string
|
||||||
|
) {
|
||||||
|
const location = `${workdir}/.docker`;
|
||||||
|
const tagCommand = `docker tag ${application.id}:${tag} ${imageName}:${customTag}`;
|
||||||
|
const pushCommand = `docker --config ${location} push ${imageName}:${customTag}`;
|
||||||
|
await executeCommand({
|
||||||
|
dockerId: application.destinationDockerId,
|
||||||
|
command: tagCommand
|
||||||
|
});
|
||||||
|
await executeCommand({
|
||||||
|
dockerId: application.destinationDockerId,
|
||||||
|
command: pushCommand
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|||||||
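The recurring change in this file is the collapse of `asyncExecShell` and `executeDockerCmd` into a single `executeCommand` helper. Its implementation is not part of this diff; from the call sites above it takes `{ command, dockerId?, shell? }` and resolves to something destructurable as `{ stdout }`. A minimal sketch of that shape, with hypothetical internals (the engine lookup and logging of the real helper are omitted):

```ts
import { exec, execFile } from 'node:child_process';
import { promisify } from 'node:util';

const execAsync = promisify(exec);
const execFileAsync = promisify(execFile);

interface ExecuteCommandArgs {
  command: string;   // full command line, e.g. `docker rm ${container}`
  dockerId?: string; // which Docker engine to target; resolution not shown in the diff
  shell?: boolean;   // true => run via a shell so pipes, && and redirection work
}

export async function executeCommand({
  command,
  dockerId,
  shell = false
}: ExecuteCommandArgs): Promise<{ stdout: string; stderr: string }> {
  // The real helper resolves `dockerId` to a local or remote engine
  // (e.g. via DOCKER_HOST); that lookup is omitted in this sketch.
  void dockerId;
  if (shell) {
    // Pipelines like `... -q | xargs -r` and `&&` chains need a shell.
    const { stdout, stderr } = await execAsync(command);
    return { stdout: stdout.toString(), stderr: stderr.toString() };
  }
  // Without shell: true, split naively and exec directly -- fine for the
  // plain single `docker ...` invocations above, not for shell metacharacters.
  const [bin, ...args] = command.split(' ');
  const { stdout, stderr } = await execFileAsync(bin, args);
  return { stdout: stdout.toString(), stderr: stderr.toString() };
}
```

Note how the call sites above pass `shell: true` exactly where the command string relies on pipes, `&&`, or quoting, while plain single `docker ...` invocations omit it.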
@@ -1,4 +1,4 @@
-import { executeDockerCmd } from './common';
+import { executeCommand } from './common';
 
 export function formatLabelsOnDocker(data) {
 return data.trim().split('\n').map(a => JSON.parse(a)).map((container) => {
@@ -16,7 +16,7 @@ export function formatLabelsOnDocker(data) {
 export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
 let containerFound = false;
 try {
-  const { stdout } = await executeDockerCmd({
+  const { stdout } = await executeCommand({
     dockerId,
     command:
       `docker inspect --format '{{json .State}}' ${container}`
@@ -28,14 +28,14 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 const isRestarting = status === 'restarting'
 const isExited = status === 'exited'
 if (status === 'created') {
-  await executeDockerCmd({
+  await executeCommand({
     dockerId,
     command:
       `docker rm ${container}`
   });
 }
 if (remove && status === 'exited') {
-  await executeDockerCmd({
+  await executeCommand({
     dockerId,
     command:
       `docker rm ${container}`
@@ -48,7 +48,6 @@ export async function checkContainer({ dockerId, container, remove = false }: {
   isRunning,
   isRestarting,
   isExited
-
 }
 };
 } catch (err) {
@@ -63,7 +62,7 @@ export async function checkContainer({ dockerId, container, remove = false }: {
 export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
 let isExited = false;
 try {
-  const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
+  const { stdout } = await executeCommand({ dockerId, command: `docker inspect -f '{{.State.Status}}' ${containerName}` })
   if (stdout.trim() === 'exited') {
     isExited = true;
   }
@@ -82,13 +81,13 @@ export async function removeContainer({
 dockerId: string;
 }): Promise<void> {
 try {
-  const { stdout } = await executeDockerCmd({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
+  const { stdout } = await executeCommand({ dockerId, command: `docker inspect --format '{{json .State}}' ${id}` })
   if (JSON.parse(stdout).Running) {
-    await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
-    await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
+    await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
+    await executeCommand({ dockerId, command: `docker rm ${id}` })
   }
   if (JSON.parse(stdout).Status === 'exited') {
-    await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
+    await executeCommand({ dockerId, command: `docker rm ${id}` })
   }
 } catch (error) {
   throw error;
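`checkContainer` and `removeContainer` both branch on the JSON emitted by `docker inspect --format '{{json .State}}'`. A small sketch of that parsing step; the `State` fields shown (`Status`, `Running`) are the standard `docker inspect` names, and only the subset these functions actually read:

```ts
// Minimal view of the container State object these helpers consume.
interface DockerState {
  Status: 'created' | 'running' | 'restarting' | 'exited' | string;
  Running: boolean;
}

function parseContainerState(stdout: string): DockerState {
  // stdout is one JSON object per `docker inspect --format '{{json .State}}'`
  return JSON.parse(stdout) as DockerState;
}

// Usage mirroring checkContainer: derive the boolean flags it returns.
const state = parseContainerState('{"Status":"exited","Running":false}');
const isRunning = state.Status === 'running';
const isRestarting = state.Status === 'restarting';
const isExited = state.Status === 'exited';
```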
@@ -1,7 +1,7 @@
 
 import jsonwebtoken from 'jsonwebtoken';
 import { saveBuildLog } from '../buildPacks/common';
-import { asyncExecShell, decrypt, prisma } from '../common';
+import { decrypt, executeCommand, prisma } from '../common';
 
 export default async function ({
 applicationId,
@@ -9,6 +9,7 @@ export default async function ({
 githubAppId,
 repository,
 apiUrl,
+gitCommitHash,
 htmlUrl,
 branch,
 buildId,
@@ -20,6 +21,7 @@ export default async function ({
 githubAppId: string;
 repository: string;
 apiUrl: string;
+gitCommitHash?: string;
 htmlUrl: string;
 branch: string;
 buildId: string;
@@ -28,16 +30,24 @@ export default async function ({
 }): Promise<string> {
 const { default: got } = await import('got')
 const url = htmlUrl.replace('https://', '').replace('http://', '');
-await saveBuildLog({ line: 'GitHub importer started.', buildId, applicationId });
 if (forPublic) {
   await saveBuildLog({
-    line: `Cloning ${repository}:${branch} branch.`,
+    line: `Cloning ${repository}:${branch}...`,
     buildId,
     applicationId
   });
-  await asyncExecShell(
-    `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
-  );
+  if (gitCommitHash) {
+    await saveBuildLog({
+      line: `Checking out ${gitCommitHash} commit...`,
+      buildId,
+      applicationId
+    });
+  }
+  await executeCommand({
+    command:
+      `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
+    shell: true
+  });
+
 } else {
   const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
@@ -62,15 +72,23 @@ export default async function ({
   })
   .json();
   await saveBuildLog({
-    line: `Cloning ${repository}:${branch} branch.`,
+    line: `Cloning ${repository}:${branch}...`,
+    buildId,
+    applicationId
+  });
+  if (gitCommitHash) {
+    await saveBuildLog({
+      line: `Checking out ${gitCommitHash} commit...`,
     buildId,
     applicationId
   });
-  await asyncExecShell(
-    `git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git submodule update --init --recursive && git lfs pull && cd .. `
-  );
   }
-const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
+  await executeCommand({
+    command:
+      `git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
+    shell: true
+  });
+}
+const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
 return commit.replace('\n', '');
 }
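The importer now splices `git checkout ${gitCommitHash || ""}` into the clone pipeline. When no hash is set, the template degrades to a bare `git checkout`, which succeeds as a no-op on the freshly cloned branch, so one command string serves both branch-tip and pinned-commit builds. An illustrative helper (hypothetical, not part of the diff) showing how that string is assembled:

```ts
// Builds the public-repo clone command used above; parameter names follow the
// importer's own arguments. This helper is for illustration only.
function buildCloneCommand(opts: {
  branch: string;
  url: string;        // host part of htmlUrl, scheme already stripped
  repository: string; // e.g. "owner/repo"
  workdir: string;
  gitCommitHash?: string;
}): string {
  const { branch, url, repository, workdir, gitCommitHash } = opts;
  return (
    `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && ` +
    // empty hash => plain `git checkout`, a successful no-op
    `cd ${workdir} && git checkout ${gitCommitHash || ''} && ` +
    `git submodule update --init --recursive && git lfs pull && cd ..`
  );
}
```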
@@ -1,11 +1,12 @@
 import { saveBuildLog } from "../buildPacks/common";
-import { asyncExecShell } from "../common";
+import { executeCommand } from "../common";
 
 export default async function ({
 applicationId,
 workdir,
 repodir,
 htmlUrl,
+gitCommitHash,
 repository,
 branch,
 buildId,
@@ -20,34 +21,43 @@ export default async function ({
 branch: string;
 buildId: string;
 repodir: string;
+gitCommitHash: string;
 privateSshKey: string;
 customPort: number;
 forPublic: boolean;
 }): Promise<string> {
 const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
-await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
 
 if (!forPublic) {
-  await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
-  await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
+  await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
+  await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
 }
 
 await saveBuildLog({
-  line: `Cloning ${repository}:${branch} branch.`,
+  line: `Cloning ${repository}:${branch}...`,
   buildId,
   applicationId
 });
+if (gitCommitHash) {
+  await saveBuildLog({
+    line: `Checking out ${gitCommitHash} commit...`,
+    buildId,
+    applicationId
+  });
+}
 if (forPublic) {
-  await asyncExecShell(
-    `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+  await executeCommand({
+    command:
+      `git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
+    }
   );
 } else {
-  await asyncExecShell(
-    `git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
+  await executeCommand({
+    command:
+      `git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
+    }
  );
 }
 
-const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
+const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
 return commit.replace('\n', '');
 }
@@ -9,7 +9,7 @@ Bree.extend(TSBree);
 
 const options: any = {
 defaultExtension: 'js',
-logger: new Cabin(),
+logger: false,
 // logger: false,
 // workerMessageHandler: async ({ name, message }) => {
 //     if (name === 'deployApplication' && message?.deploying) {
@@ -1,145 +1,20 @@
-import { isDev } from "./common";
+import { isARM, isDev } from './common';
 import fs from 'fs/promises';
 export async function getTemplates() {
-let templates: any = [];
-if (isDev) {
-  templates = JSON.parse(await (await fs.readFile('./templates.json')).toString())
-} else {
-  templates = JSON.parse(await (await fs.readFile('/app/templates.json')).toString())
+const templatePath = isDev ? './templates.json' : '/app/templates.json';
+const open = await fs.open(templatePath, 'r');
+try {
+  let data = await open.readFile({ encoding: 'utf-8' });
+  let jsonData = JSON.parse(data);
+  if (isARM(process.arch)) {
+    jsonData = jsonData.filter((d) => d.arch !== 'amd64');
+  }
+  return jsonData;
+} catch (error) {
+  return [];
+} finally {
+  await open?.close();
 }
-// if (!isDev) {
-// templates.push({
-//   "templateVersion": "1.0.0",
-//   "defaultVersion": "latest",
-//   "name": "Test-Fake-Service",
-//   "description": "",
-//   "services": {
-//     "$$id": {
-//       "name": "Test-Fake-Service",
-//       "depends_on": [
-//         "$$id-postgresql",
-//         "$$id-redis"
-//       ],
-//       "image": "weblate/weblate:$$core_version",
-//       "volumes": [
-//         "$$id-data:/app/data",
-//       ],
-//       "environment": [
-//         `POSTGRES_SECRET=$$secret_postgres_secret`,
-//         `WEBLATE_SITE_DOMAIN=$$config_weblate_site_domain`,
-//         `WEBLATE_ADMIN_PASSWORD=$$secret_weblate_admin_password`,
-//         `POSTGRES_PASSWORD=$$secret_postgres_password`,
-//         `POSTGRES_USER=$$config_postgres_user`,
-//         `POSTGRES_DATABASE=$$config_postgres_db`,
-//         `POSTGRES_HOST=$$id-postgresql`,
-//         `POSTGRES_PORT=5432`,
-//         `REDIS_HOST=$$id-redis`,
-//       ],
-//       "ports": [
-//         "8080"
-//       ]
-//     },
-//     "$$id-postgresql": {
-//       "name": "PostgreSQL",
-//       "depends_on": [],
-//       "image": "postgres:14-alpine",
-//       "volumes": [
-//         "$$id-postgresql-data:/var/lib/postgresql/data",
-//       ],
-//       "environment": [
-//         "POSTGRES_USER=$$config_postgres_user",
-//         "POSTGRES_PASSWORD=$$secret_postgres_password",
-//         "POSTGRES_DB=$$config_postgres_db",
-//       ],
-//       "ports": []
-//     },
-//     "$$id-redis": {
-//       "name": "Redis",
-//       "depends_on": [],
-//       "image": "redis:7-alpine",
-//       "volumes": [
-//         "$$id-redis-data:/data",
-//       ],
-//       "environment": [],
-//       "ports": [],
-//     }
-//   },
-//   "variables": [
-//     {
-//       "id": "$$config_weblate_site_domain",
-//       "main": "$$id",
-//       "name": "WEBLATE_SITE_DOMAIN",
-//       "label": "Weblate Domain",
-//       "defaultValue": "$$generate_domain",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$secret_weblate_admin_password",
-//       "main": "$$id",
-//       "name": "WEBLATE_ADMIN_PASSWORD",
-//       "label": "Weblate Admin Password",
-//       "defaultValue": "$$generate_password",
-//       "description": "",
-//       "extras": {
-//         "isVisibleOnUI": true,
-//       }
-//     },
-//     {
-//       "id": "$$secret_weblate_admin_password2",
-//       "name": "WEBLATE_ADMIN_PASSWORD2",
-//       "label": "Weblate Admin Password2",
-//       "defaultValue": "$$generate_password",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$config_postgres_user",
-//       "main": "$$id-postgresql",
-//       "name": "POSTGRES_USER",
-//       "label": "PostgreSQL User",
-//       "defaultValue": "$$generate_username",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$secret_postgres_password",
-//       "main": "$$id-postgresql",
-//       "name": "POSTGRES_PASSWORD",
-//       "label": "PostgreSQL Password",
-//       "defaultValue": "$$generate_password(32)",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$secret_postgres_password_hex32",
-//       "name": "POSTGRES_PASSWORD_hex32",
-//       "label": "PostgreSQL Password hex32",
-//       "defaultValue": "$$generate_hex(32)",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$config_postgres_something_hex32",
-//       "name": "POSTGRES_SOMETHING_HEX32",
-//       "label": "PostgreSQL Something hex32",
-//       "defaultValue": "$$generate_hex(32)",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$config_postgres_db",
-//       "main": "$$id-postgresql",
-//       "name": "POSTGRES_DB",
-//       "label": "PostgreSQL Database",
-//       "defaultValue": "weblate",
-//       "description": "",
-//     },
-//     {
-//       "id": "$$secret_postgres_secret",
-//       "name": "POSTGRES_SECRET",
-//       "label": "PostgreSQL Secret",
-//       "defaultValue": "",
-//       "description": "",
-//     },
-//   ]
-// })
-// }
-return templates
 }
 const compareSemanticVersions = (a: string, b: string) => {
 const a1 = a.split('.');
@@ -155,16 +30,18 @@ const compareSemanticVersions = (a: string, b: string) => {
 return b1.length - a1.length;
 };
 export async function getTags(type: string) {
+try {
 if (type) {
-  let tags: any = [];
-  if (isDev) {
-    tags = JSON.parse(await (await fs.readFile('./tags.json')).toString())
-  } else {
-    tags = JSON.parse(await (await fs.readFile('/app/tags.json')).toString())
-  }
-  tags = tags.find((tag: any) => tag.name.includes(type))
+  const tagsPath = isDev ? './tags.json' : '/app/tags.json';
+  const data = await fs.readFile(tagsPath, 'utf8');
+  let tags = JSON.parse(data);
+  if (tags) {
+    tags = tags.find((tag: any) => tag.name.includes(type));
   tags.tags = tags.tags.sort(compareSemanticVersions).reverse();
-  return tags
+  return tags;
+  }
+}
+} catch (error) {
+  return [];
 }
-return []
 }
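`getTemplates` now filters the template list by host architecture via `isARM(process.arch)`. `isARM` is imported from `./common` and its body is not shown in this diff; the sketch below assumes a check against Node's `process.arch` values, which may differ from the real implementation:

```ts
// Assumed shape of the isARM gate; the actual check in './common' may differ.
function isARM(arch: string): boolean {
  return arch === 'arm' || arch === 'arm64'; // assumption based on process.arch values
}

// Mirrors the filter in getTemplates: on ARM hosts, drop templates that
// declare themselves amd64-only; everything else stays available.
function filterTemplatesForHost(templates: Array<{ arch?: string }>): Array<{ arch?: string }> {
  if (isARM(process.arch)) {
    return templates.filter((t) => t.arch !== 'amd64');
  }
  return templates;
}
```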
@@ -1,5 +1,5 @@
 
-import { prisma } from '../common';
+import { decrypt, prisma } from '../common';
 
 export async function removeService({ id }: { id: string }): Promise<void> {
 await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
@@ -23,3 +23,17 @@ export async function removeService({ id }: { id: string }): Promise<void> {
 
 await prisma.service.delete({ where: { id } });
 }
+export async function verifyAndDecryptServiceSecrets(id: string) {
+  const secrets = await prisma.serviceSecret.findMany({ where: { serviceId: id } })
+  let decryptedSecrets = secrets.map(secret => {
+    const { name, value } = secret
+    if (value) {
+      let rawValue = decrypt(value)
+      rawValue = rawValue.replaceAll(/\$/gi, '$$$')
+      return { name, value: rawValue }
+    }
+    return { name, value }
+
+  })
+  return decryptedSecrets
+}
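The `replaceAll(/\$/gi, '$$$')` line in `verifyAndDecryptServiceSecrets` is easy to misread: in a JavaScript replacement string, `$$` inserts a single literal `$`, so the replacement `'$$$'` emits `$$` for every `$` matched. That doubles each dollar sign, which is the escape docker compose expects for a literal `$` in an environment value (otherwise compose would attempt variable interpolation). A worked example:

```ts
// Each `$` in the decrypted value becomes `$$` in the output:
// replacement '$$$' = `$$` (escape for one literal `$`) + `$` (plain literal).
const decrypted = 'pa$$word';
const escaped = decrypted.replaceAll(/\$/g, '$$$'); // source uses the /gi flags; `i` is a no-op on `$`
console.log(escaped); // "pa$$$$word" -- docker compose reads this back as "pa$$word"
```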
@@ -2,11 +2,12 @@ import type { FastifyReply, FastifyRequest } from 'fastify';
|
|||||||
import fs from 'fs/promises';
|
import fs from 'fs/promises';
|
||||||
import yaml from 'js-yaml';
|
import yaml from 'js-yaml';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeDockerCmd, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
|
import { asyncSleep, ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, errorHandler, executeCommand, getServiceFromDB, isARM, makeLabelForServices, persistentVolumes, prisma, stopTcpHttpProxy } from '../common';
|
||||||
import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers';
|
import { parseAndFindServiceTemplates } from '../../routes/api/v1/services/handlers';
|
||||||
|
|
||||||
import { ServiceStartStop } from '../../routes/api/v1/services/types';
|
import { ServiceStartStop } from '../../routes/api/v1/services/types';
|
||||||
import { OnlyId } from '../../types';
|
import { OnlyId } from '../../types';
|
||||||
|
import { verifyAndDecryptServiceSecrets } from './common';
|
||||||
|
|
||||||
export async function stopService(request: FastifyRequest<ServiceStartStop>) {
|
export async function stopService(request: FastifyRequest<ServiceStartStop>) {
|
||||||
try {
|
try {
|
||||||
@@ -14,14 +15,19 @@ export async function stopService(request: FastifyRequest<ServiceStartStop>) {
|
|||||||
const teamId = request.user.teamId;
|
const teamId = request.user.teamId;
|
||||||
const { destinationDockerId } = await getServiceFromDB({ id, teamId });
|
const { destinationDockerId } = await getServiceFromDB({ id, teamId });
|
||||||
if (destinationDockerId) {
|
if (destinationDockerId) {
|
||||||
await executeDockerCmd({
|
const { stdout: containers } = await executeCommand({
|
||||||
dockerId: destinationDockerId,
|
dockerId: destinationDockerId,
|
||||||
command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
|
command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}`
|
||||||
})
|
|
||||||
await executeDockerCmd({
|
|
||||||
dockerId: destinationDockerId,
|
|
||||||
command: `docker ps -a --filter 'label=com.docker.compose.project=${id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
|
|
||||||
})
|
})
|
||||||
|
if (containers) {
|
||||||
|
const containerArray = containers.split('\n');
|
||||||
|
if (containerArray.length > 0) {
|
||||||
|
for (const container of containerArray) {
|
||||||
|
await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
|
||||||
|
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
return {}
|
return {}
|
||||||
}
|
}
|
||||||
throw { status: 500, message: 'Could not stop containers.' }
|
throw { status: 500, message: 'Could not stop containers.' }
|
||||||
@@ -34,7 +40,7 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
|
|||||||
const { id } = request.params;
|
const { id } = request.params;
|
||||||
const teamId = request.user.teamId;
|
const teamId = request.user.teamId;
|
||||||
const service = await getServiceFromDB({ id, teamId });
|
const service = await getServiceFromDB({ id, teamId });
|
||||||
const arm = isARM(service.arch)
|
const arm = isARM(service.arch);
|
||||||
const { type, destinationDockerId, destinationDocker, persistentStorage, exposePort } =
|
const { type, destinationDockerId, destinationDocker, persistentStorage, exposePort } =
|
||||||
service;
|
service;
|
||||||
|
|
||||||
@@ -65,15 +71,17 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
const secrets = await verifyAndDecryptServiceSecrets(id)
|
||||||
const secrets = await prisma.serviceSecret.findMany({ where: { serviceId: id } })
|
|
||||||
for (const secret of secrets) {
|
for (const secret of secrets) {
|
||||||
const { name, value } = secret
|
const { name, value } = secret
|
||||||
if (value) {
|
if (value) {
|
||||||
const foundEnv = !!template.services[s].environment?.find(env => env.startsWith(`${name}=`))
|
const foundEnv = !!template.services[s].environment?.find(env => env.startsWith(`${name}=`))
|
||||||
const foundNewEnv = !!newEnvironments?.find(env => env.startsWith(`${name}=`))
|
const foundNewEnv = !!newEnvironments?.find(env => env.startsWith(`${name}=`))
|
||||||
if (foundEnv && !foundNewEnv) {
|
if (foundEnv && !foundNewEnv) {
|
||||||
newEnvironments.push(`${name}=${decrypt(value)}`)
|
newEnvironments.push(`${name}=${value}`)
|
||||||
|
}
|
||||||
|
if (!foundEnv && !foundNewEnv && s === id) {
|
||||||
|
newEnvironments.push(`${name}=${value}`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -103,15 +111,34 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
let ports = []
|
||||||
|
if (template.services[s].proxy?.length > 0) {
|
||||||
|
for (const proxy of template.services[s].proxy) {
|
||||||
|
if (proxy.hostPort) {
|
||||||
|
ports.push(`${proxy.hostPort}:${proxy.port}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (template.services[s].ports?.length === 1) {
|
||||||
|
for (const port of template.services[s].ports) {
|
||||||
|
if (exposePort) {
|
||||||
|
ports.push(`${exposePort}:${port}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let image = template.services[s].image
|
||||||
|
if (arm && template.services[s].imageArm) {
|
||||||
|
image = template.services[s].imageArm
|
||||||
|
}
|
||||||
config[s] = {
|
config[s] = {
|
||||||
container_name: s,
|
container_name: s,
|
||||||
build: template.services[s].build || undefined,
|
build: template.services[s].build || undefined,
|
||||||
command: template.services[s].command,
|
command: template.services[s].command,
|
||||||
entrypoint: template.services[s]?.entrypoint,
|
entrypoint: template.services[s]?.entrypoint,
|
||||||
image: arm ? template.services[s].imageArm : template.services[s].image,
|
image,
|
||||||
expose: template.services[s].ports,
|
expose: template.services[s].ports,
|
||||||
...(exposePort ? { ports: [`${exposePort}:${exposePort}`] } : {}),
|
ports: ports.length > 0 ? ports : undefined,
|
||||||
volumes: Array.from(volumes),
|
volumes: Array.from(volumes),
|
||||||
environment: newEnvironments,
|
environment: newEnvironments,
|
||||||
depends_on: template.services[s]?.depends_on,
|
depends_on: template.services[s]?.depends_on,
|
||||||
@@ -121,7 +148,6 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
|
|||||||
labels: makeLabelForServices(type),
|
labels: makeLabelForServices(type),
|
||||||
...defaultComposeConfiguration(network),
|
...defaultComposeConfiguration(network),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate files for builds
|
// Generate files for builds
|
||||||
if (template.services[s]?.files?.length > 0) {
|
if (template.services[s]?.files?.length > 0) {
|
||||||
if (!config[s].build) {
|
if (!config[s].build) {
|
||||||
@@ -161,21 +187,37 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
|
|||||||
// Workaround: Stop old minio proxies
|
// Workaround: Stop old minio proxies
|
||||||
if (service.type === 'minio') {
|
if (service.type === 'minio') {
|
||||||
try {
|
try {
|
||||||
await executeDockerCmd({
|
const { stdout: containers } = await executeCommand({
|
||||||
dockerId: destinationDocker.id,
|
dockerId: destinationDocker.id,
|
||||||
command:
|
command:
|
||||||
`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container stop -t 0`
|
`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
|
||||||
});
|
});
|
||||||
|
if (containers) {
|
||||||
|
const containerArray = containers.split('\n');
|
||||||
|
if (containerArray.length > 0) {
|
||||||
|
for (const container of containerArray) {
|
||||||
|
await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
|
||||||
|
await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
} catch (error) { }
|
} catch (error) { }
|
||||||
try {
|
try {
|
||||||
await executeDockerCmd({
|
const { stdout: containers } = await executeCommand({
|
||||||
dockerId: destinationDocker.id,
|
dockerId: destinationDocker.id,
|
||||||
command:
|
command:
|
||||||
`docker container ls -a --filter 'name=${id}-' --format {{.ID}}|xargs -r -n 1 docker container rm -f`
|
 			`docker container ls -a --filter 'name=${id}-' --format {{.ID}}`
 		});
+		if (containers) {
+			const containerArray = containers.split('\n');
+			if (containerArray.length > 0) {
+				for (const container of containerArray) {
+					await executeCommand({ dockerId: destinationDockerId, command: `docker stop -t 0 ${container}` })
+					await executeCommand({ dockerId: destinationDockerId, command: `docker rm --force ${container}` })
+				}
+			}
+		}
 	} catch (error) { }

 	}
 	return {}
 } catch ({ status, message }) {
@@ -185,16 +227,16 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
 async function startServiceContainers(fastify, id, teamId, dockerId, composeFileDestination) {
 	try {
 		fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Pulling images...' })
-		await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
+		await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} pull` })
 	} catch (error) { }
 	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Building images...' })
-	await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
+	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} build --no-cache` })
 	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Creating containers...' })
-	await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
+	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} create` })
 	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 'Starting containers...' })
-	await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
+	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} start` })
 	await asyncSleep(1000);
-	await executeDockerCmd({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
+	await executeCommand({ dockerId, command: `docker compose -f ${composeFileDestination} up -d` })
 	fastify.io.to(teamId).emit(`start-service`, { serviceId: id, state: 0 })
 }
 export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
@@ -206,7 +248,7 @@ export async function migrateAppwriteDB(request: FastifyRequest<OnlyId>, reply:
 		destinationDocker,
 	} = await getServiceFromDB({ id, teamId });
 	if (destinationDockerId) {
-		await executeDockerCmd({
+		await executeCommand({
 			dockerId: destinationDocker.id,
 			command: `docker exec ${id} migrate`
 		})
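Note: the common thread in every hunk of this compare is that the old `executeDockerCmd` (and, further down, `asyncExecShell` / `executeSSHCmd`) helpers are folded into a single `executeCommand` helper. A minimal sketch of its call shape, inferred only from the call sites visible in this diff — the real implementation lives in `lib/common` and is not part of this compare, so everything here other than the name `executeCommand` is an assumption:

	// Hypothetical signature reconstructed from usage in the hunks below.
	interface ExecuteCommandOptions {
		command: string;       // shell/docker command to run
		dockerId?: string;     // destination engine to target; omitted for purely local commands
		shell?: boolean;       // run through a shell (needed for pipes such as `| grep`)
		sshCommand?: boolean;  // run on the remote host over SSH instead of via DOCKER_HOST
	}
	declare function executeCommand(options: ExecuteCommandOptions): Promise<{ stdout: string; stderr: string }>;

The call sites below use every combination of these flags, so the helper presumably dispatches between local shell, remote Docker engine, and SSH execution internally.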
@@ -7,12 +7,12 @@ import yaml from 'js-yaml';
 import csv from 'csvtojson';

 import { day } from '../../../../lib/dayjs';
-import { setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
-import { checkDomainsIsValidInDNS, checkExposedPort, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, listSettings, prisma, stopBuild, uniqueName } from '../../../../lib/common';
+import { saveDockerRegistryCredentials, setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
+import { checkDomainsIsValidInDNS, checkExposedPort, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeCommand, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, listSettings, prisma, stopBuild, uniqueName } from '../../../../lib/common';
 import { checkContainer, formatLabelsOnDocker, removeContainer } from '../../../../lib/docker';

 import type { FastifyRequest } from 'fastify';
-import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds } from './types';
+import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds, RestartApplication } from './types';
 import { OnlyId } from '../../../../types';

 function filterObject(obj, callback) {
@@ -78,7 +78,7 @@ export async function cleanupUnconfiguredApplications(request: FastifyRequest<an
 	for (const application of applications) {
 		if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn)) {
 			if (application?.destinationDockerId && application.destinationDocker?.network) {
-				const { stdout: containers } = await executeDockerCmd({
+				const { stdout: containers } = await executeCommand({
 					dockerId: application.destinationDocker.id,
 					command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${application.id} --format '{{json .}}'`
 				})
@@ -113,7 +113,7 @@ export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
 	const application: any = await getApplicationFromDB(id, teamId);
 	if (application?.destinationDockerId) {
 		if (application.buildPack === 'compose') {
-			const { stdout: containers } = await executeDockerCmd({
+			const { stdout: containers } = await executeCommand({
 				dockerId: application.destinationDocker.id,
 				command:
 					`docker ps -a --filter "label=coolify.applicationId=${id}" --format '{{json .}}'`
@@ -241,7 +241,8 @@ export async function getApplicationFromDB(id: string, teamId: string) {
 			secrets: true,
 			persistentStorage: true,
 			connectedDatabase: true,
-			previewApplication: true
+			previewApplication: true,
+			dockerRegistry: true
 		}
 	});
 	if (!application) {
@@ -280,7 +281,7 @@ export async function getApplicationFromDBWebhook(projectId: number, branch: str
 		}
 	});
 	if (applications.length === 0) {
-		throw { status: 500, message: 'Application not configured.' }
+		throw { status: 500, message: 'Application not configured.', type: 'webhook' }
 	}
 	applications = applications.map((application: any) => {
 		application = decryptApplication(application);
@@ -302,8 +303,8 @@ export async function getApplicationFromDBWebhook(projectId: number, branch: str

 	return applications;

-} catch ({ status, message }) {
-	return errorHandler({ status, message })
+} catch ({ status, message, type }) {
+	return errorHandler({ status, message, type })
 }
 }
 export async function saveApplication(request: FastifyRequest<SaveApplication>, reply: FastifyReply) {
@@ -326,13 +327,16 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 		dockerFileLocation,
 		denoMainFile,
 		denoOptions,
+		gitCommitHash,
 		baseImage,
 		baseBuildImage,
 		deploymentType,
 		baseDatabaseBranch,
 		dockerComposeFile,
 		dockerComposeFileLocation,
-		dockerComposeConfiguration
+		dockerComposeConfiguration,
+		simpleDockerfile,
+		dockerRegistryImageName
 	} = request.body
 	if (port) port = Number(port);
 	if (exposePort) {
@@ -350,6 +354,7 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 		publishDirectory,
 		baseDirectory,
 		dockerFileLocation,
+		dockerComposeFileLocation,
 		denoMainFile
 	});
 	if (baseDatabaseBranch) {
@@ -364,11 +369,14 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 				pythonVariable,
 				denoOptions,
 				baseImage,
+				gitCommitHash,
 				baseBuildImage,
 				deploymentType,
 				dockerComposeFile,
 				dockerComposeFileLocation,
 				dockerComposeConfiguration,
+				simpleDockerfile,
+				dockerRegistryImageName,
 				...defaultConfiguration,
 				connectedDatabase: { update: { hostedDatabaseDBName: baseDatabaseBranch } }
 			}
@@ -382,6 +390,7 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 				exposePort,
 				pythonWSGI,
 				pythonModule,
+				gitCommitHash,
 				pythonVariable,
 				denoOptions,
 				baseImage,
@@ -390,6 +399,8 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
 				dockerComposeFile,
 				dockerComposeFileLocation,
 				dockerComposeConfiguration,
+				simpleDockerfile,
+				dockerRegistryImageName,
 				...defaultConfiguration
 			}
 		});
@@ -438,16 +449,17 @@ export async function stopPreviewApplication(request: FastifyRequest<StopPreview
 	}
 }

-export async function restartApplication(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
+export async function restartApplication(request: FastifyRequest<RestartApplication>, reply: FastifyReply) {
 	try {
 		const { id } = request.params
+		const { imageId = null } = request.body
 		const { teamId } = request.user
 		let application: any = await getApplicationFromDB(id, teamId);
 		if (application?.destinationDockerId) {
 			const buildId = cuid();
 			const { id: dockerId, network } = application.destinationDocker;
-			const { secrets, pullmergeRequestId, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
+			const { dockerRegistry, secrets, pullmergeRequestId, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
+			let location = null;
 			const envs = [
 				`PORT=${port}`
 			];
@@ -456,13 +468,13 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
 				if (pullmergeRequestId) {
 					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 					if (isSecretFound.length > 0) {
-						envs.push(`${secret.name}=${isSecretFound[0].value}`);
+						envs.push(`${secret.name}='${isSecretFound[0].value}'`);
 					} else {
-						envs.push(`${secret.name}=${secret.value}`);
+						envs.push(`${secret.name}='${secret.value}'`);
 					}
 				} else {
 					if (!secret.isPRMRSecret) {
-						envs.push(`${secret.name}=${secret.value}`);
+						envs.push(`${secret.name}='${secret.value}'`);
 					}
 				}
 			});
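The three `envs.push` changes above now wrap each secret value in single quotes before the list is written to the build's `.env` file, presumably so values containing spaces or shell metacharacters survive being sourced by `docker compose`. For example, a secret `API_KEY` with value `s3cr3t value` (both made up for illustration) now serializes as the line:

	API_KEY='s3cr3t value'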
@@ -470,7 +482,10 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
 			const { workdir } = await createDirectories({ repository, buildId });
 			const labels = []
 			let image = null
-			const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'` })
+			if (imageId) {
+				image = imageId
+			} else {
+				const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'` })
 				const containersArray = container.trim().split('\n');
 				for (const container of containersArray) {
 					const containerObj = formatLabelsOnDocker(container);
@@ -481,17 +496,34 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
 					}
 				})
 			}
-			let imageFound = false;
+			}
+			if (dockerRegistry) {
+				const { url, username, password } = dockerRegistry
+				location = await saveDockerRegistryCredentials({ url, username, password, workdir })
+			}
+
+			let imageFoundLocally = false;
 			try {
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId,
 					command: `docker image inspect ${image}`
 				})
-				imageFound = true;
+				imageFoundLocally = true;
 			} catch (error) {
 				//
 			}
-			if (!imageFound) {
+			let imageFoundRemotely = false;
+			try {
+				await executeCommand({
+					dockerId,
+					command: `docker ${location ? `--config ${location}` : ''} pull ${image}`
+				})
+				imageFoundRemotely = true;
+			} catch (error) {
+				//
+			}
+
+			if (!imageFoundLocally && !imageFoundRemotely) {
 				throw { status: 500, message: 'Image not found, cannot restart application.' }
 			}
 			await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
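When a private registry is attached, `saveDockerRegistryCredentials` apparently writes Docker credentials under the build `workdir` and returns a `location` that the pull above passes via `docker --config`. A hypothetical sketch of what such a helper could do — the real one lives in `lib/buildPacks/common` and is not shown in this compare, so the name and file layout below are assumptions:

	import fs from 'fs/promises';

	// Sketch only: write credentials the way `docker login` would, so that
	// `docker --config <dir> pull <image>` can authenticate without touching ~/.docker.
	async function writeRegistryConfig(url: string, username: string, password: string, workdir: string): Promise<string> {
		const location = `${workdir}/.docker`;
		await fs.mkdir(location, { recursive: true });
		const auth = Buffer.from(`${username}:${password}`).toString('base64');
		await fs.writeFile(`${location}/config.json`, JSON.stringify({ auths: { [url]: { auth } } }));
		return location; // used as: docker --config ${location} pull ${image}
	}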
@@ -537,9 +569,14 @@ export async function restartApplication(request: FastifyRequest<OnlyId>, reply:
 				volumes: Object.assign({}, ...composeVolumes)
 			};
 			await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
-			await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
-			await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
-			await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
+			try {
+				await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` })
+				await executeCommand({ dockerId, command: `docker rm ${id}` })
+			} catch (error) {
+				//
+			}
+
+			await executeCommand({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
 			return reply.code(201).send();
 		}
 		throw { status: 500, message: 'Application cannot be restarted.' }
@@ -555,7 +592,7 @@ export async function stopApplication(request: FastifyRequest<OnlyId>, reply: Fa
 		if (application?.destinationDockerId) {
 			const { id: dockerId } = application.destinationDocker;
 			if (application.buildPack === 'compose') {
-				const { stdout: containers } = await executeDockerCmd({
+				const { stdout: containers } = await executeCommand({
 					dockerId: application.destinationDocker.id,
 					command:
 						`docker ps -a --filter "label=coolify.applicationId=${id}" --format '{{json .}}'`
@@ -590,7 +627,7 @@ export async function deleteApplication(request: FastifyRequest<DeleteApplicatio
 			include: { destinationDocker: true }
 		});
 		if (!force && application?.destinationDockerId && application.destinationDocker?.network) {
-			const { stdout: containers } = await executeDockerCmd({
+			const { stdout: containers } = await executeCommand({
 				dockerId: application.destinationDocker.id,
 				command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
 			})
@@ -676,6 +713,47 @@ export async function getUsage(request) {
 		return errorHandler({ status, message })
 	}
 }
+export async function getDockerImages(request) {
+	try {
+		const { id } = request.params
+		const teamId = request.user?.teamId;
+		const application: any = await getApplicationFromDB(id, teamId);
+		let imagesAvailables = [];
+		try {
+			const { stdout } = await executeCommand({ dockerId: application.destinationDocker.id, command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}' | grep -i ${id} | grep -v cache`, shell: true });
+			const { stdout: runningImage } = await executeCommand({ dockerId: application.destinationDocker.id, command: `docker ps -a --filter 'label=com.docker.compose.service=${id}' --format {{.Image}}` });
+			const images = stdout.trim().split('\n');
+
+			for (const image of images) {
+				const [repository, tag, createdAt] = image.split('#');
+				if (tag.includes('-')) {
+					continue;
+				}
+				const [year, time] = createdAt.split(' ');
+				imagesAvailables.push({
+					repository,
+					tag,
+					createdAt: day(year + time).unix()
+				})
+			}
+
+			imagesAvailables = imagesAvailables.sort((a, b) => b.tag - a.tag);
+
+			return {
+				imagesAvailables,
+				runningImage
+			}
+		} catch (error) {
+			return {
+				imagesAvailables,
+			}
+		}
+
+	} catch ({ status, message }) {
+
+		return errorHandler({ status, message })
+	}
+}
+
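The `#`-separated `--format` string keeps the parsing in `getDockerImages` trivial. A self-contained sketch of the same record handling, using hard-coded sample records instead of live `docker images` output (the repository name and tags below are made up for illustration):

	// Records in the '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}' shape used above.
	const records = [
		'registry.example.com/myapp#1689012345#2023-07-10 18:45:39 +0000 UTC',
		'registry.example.com/myapp#1689098765#2023-07-11 19:12:03 +0000 UTC'
	];
	const parsed = records.map((record) => {
		const [repository, tag, createdAt] = record.split('#');
		return { repository, tag, createdAt };
	});
	// Newest first, mirroring the handler's `b.tag - a.tag` sort, which assumes
	// numeric, timestamp-like tags (non-numeric tags would compare as NaN).
	parsed.sort((a, b) => Number(b.tag) - Number(a.tag));
	console.log(parsed[0].tag); // '1689098765'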
 export async function getUsageByContainer(request) {
 	try {
@@ -718,6 +796,7 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
 			await prisma.application.update({ where: { id }, data: { configHash } });
 		}
 		await prisma.application.update({ where: { id }, data: { updatedAt: new Date() } });
+		if (application.gitSourceId) {
 		await prisma.build.create({
 			data: {
 				id: buildId,
@@ -734,6 +813,20 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
 				type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? 'manual_pr' : 'manual_mr' : 'manual'
 			}
 		});
+		} else {
+			await prisma.build.create({
+				data: {
+					id: buildId,
+					applicationId: id,
+					branch: 'latest',
+					forceRebuild,
+					destinationDockerId: application.destinationDocker?.id,
+					status: 'queued',
+					type: 'manual'
+				}
+			});
+		}
+
 		return {
 			buildId
 		};
@@ -748,20 +841,28 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
 export async function saveApplicationSource(request: FastifyRequest<SaveApplicationSource>, reply: FastifyReply) {
 	try {
 		const { id } = request.params
-		const { gitSourceId, forPublic, type } = request.body
+		const { gitSourceId, forPublic, type, simpleDockerfile } = request.body
 		if (forPublic) {
 			const publicGit = await prisma.gitSource.findFirst({ where: { type, forPublic } });
 			await prisma.application.update({
 				where: { id },
 				data: { gitSource: { connect: { id: publicGit.id } } }
 			});
-		} else {
+		}
+		if (simpleDockerfile) {
+			await prisma.application.update({
+				where: { id },
+				data: { simpleDockerfile, settings: { update: { autodeploy: false } } }
+			});
+		}
+		if (gitSourceId) {
 			await prisma.application.update({
 				where: { id },
 				data: { gitSource: { connect: { id: gitSourceId } } }
 			});
 		}

 		return reply.code(201).send()
 	} catch ({ status, message }) {
 		return errorHandler({ status, message })
@@ -864,11 +965,11 @@ export async function getBuildPack(request) {
 		const teamId = request.user?.teamId;
 		const application: any = await getApplicationFromDB(id, teamId);
 		return {
-			type: application.gitSource.type,
+			type: application.gitSource?.type || 'dockerRegistry',
 			projectId: application.projectId,
 			repository: application.repository,
 			branch: application.branch,
-			apiUrl: application.gitSource.apiUrl,
+			apiUrl: application.gitSource?.apiUrl || null,
 			isPublicRepository: application.settings.isPublicRepository
 		}
 	} catch ({ status, message }) {
@@ -876,6 +977,16 @@ export async function getBuildPack(request) {
 	}
 }

+export async function saveRegistry(request, reply) {
+	try {
+		const { id } = request.params
+		const { registryId } = request.body
+		await prisma.application.update({ where: { id }, data: { dockerRegistry: { connect: { id: registryId } } } });
+		return reply.code(201).send()
+	} catch ({ status, message }) {
+		return errorHandler({ status, message })
+	}
+}
 export async function saveBuildPack(request, reply) {
 	try {
 		const { id } = request.params
@@ -1058,13 +1169,13 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 				if (pullmergeRequestId) {
 					const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
 					if (isSecretFound.length > 0) {
-						envs.push(`${secret.name}=${isSecretFound[0].value}`);
+						envs.push(`${secret.name}='${isSecretFound[0].value}'`);
 					} else {
-						envs.push(`${secret.name}=${secret.value}`);
+						envs.push(`${secret.name}='${secret.value}'`);
 					}
 				} else {
 					if (!secret.isPRMRSecret) {
-						envs.push(`${secret.name}=${secret.value}`);
+						envs.push(`${secret.name}='${secret.value}'`);
 					}
 				}
 			});
@@ -1072,7 +1183,7 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 			const { workdir } = await createDirectories({ repository, buildId });
 			const labels = []
 			let image = null
-			const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
+			const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
 			const containersArray = container.trim().split('\n');
 			for (const container of containersArray) {
 				const containerObj = formatLabelsOnDocker(container);
@@ -1085,7 +1196,7 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 			}
 			let imageFound = false;
 			try {
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId,
 					command: `docker image inspect ${image}`
 				})
@@ -1139,9 +1250,9 @@ export async function restartPreview(request: FastifyRequest<RestartPreviewAppli
 				volumes: Object.assign({}, ...composeVolumes)
 			};
 			await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
-			await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
-			await executeDockerCmd({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
-			await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
+			await executeCommand({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
+			await executeCommand({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
+			await executeCommand({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
 			return reply.code(201).send();
 		}
 		throw { status: 500, message: 'Application cannot be restarted.' }
@@ -1182,7 +1293,7 @@ export async function loadPreviews(request: FastifyRequest<OnlyId>) {
 	try {
 		const { id } = request.params
 		const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
-		const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
+		const { stdout } = await executeCommand({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
 		if (stdout === '') {
 			throw { status: 500, message: 'No previews found.' }
 		}
@@ -1257,7 +1368,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
 	if (destinationDockerId) {
 		try {
 			const { default: ansi } = await import('strip-ansi')
-			const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
+			const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
 			const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
 			const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
 			const logs = stripLogsStderr.concat(stripLogsStdout)
@@ -1448,19 +1559,19 @@ export async function createdBranchDatabase(database: any, baseDatabaseBranch: s
 		if (destinationDockerId) {
 			if (type === 'postgresql') {
 				const decryptedRootUserPassword = decrypt(rootUserPassword);
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId: destinationDockerId,
 					command: `docker exec ${id} pg_dump -d "postgresql://postgres:${decryptedRootUserPassword}@${id}:5432/${baseDatabaseBranch}" --encoding=UTF8 --schema-only -f /tmp/${baseDatabaseBranch}.dump`
 				})
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId: destinationDockerId,
 					command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "CREATE DATABASE branch_${pullmergeRequestId}"`
 				})
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId: destinationDockerId,
 					command: `docker exec ${id} psql -d "postgresql://postgres:${decryptedRootUserPassword}@${id}:5432/branch_${pullmergeRequestId}" -f /tmp/${baseDatabaseBranch}.dump`
 				})
-				await executeDockerCmd({
+				await executeCommand({
 					dockerId: destinationDockerId,
 					command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "ALTER DATABASE branch_${pullmergeRequestId} OWNER TO ${dbUser}"`
 				})
@@ -1479,12 +1590,12 @@ export async function removeBranchDatabase(database: any, pullmergeRequestId: st
 		if (type === 'postgresql') {
 			const decryptedRootUserPassword = decrypt(rootUserPassword);
 			// Terminate all connections to the database
-			await executeDockerCmd({
+			await executeCommand({
 				dockerId: destinationDockerId,
 				command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'branch_${pullmergeRequestId}' AND pid <> pg_backend_pid();"`
 			})

-			await executeDockerCmd({
+			await executeCommand({
 				dockerId: destinationDockerId,
 				command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "DROP DATABASE branch_${pullmergeRequestId}"`
 			})
@@ -1,8 +1,8 @@
 import { FastifyPluginAsync } from 'fastify';
 import { OnlyId } from '../../../../types';
-import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';
+import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getDockerImages, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, getUsageByContainer, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRegistry, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';

-import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
+import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartApplication, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';

 const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 	fastify.addHook('onRequest', async (request) => {
@@ -21,7 +21,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {

 	fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request));

-	fastify.post<OnlyId>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
+	fastify.post<RestartApplication>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
 	fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
 	fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply));

@@ -45,7 +45,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 	fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
 	fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));

-	// fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
 	fastify.get<GetApplicationLogs>('/:id/logs/:containerId', async (request) => await getApplicationLogs(request));
 	fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
 	fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
@@ -53,6 +52,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 	fastify.get('/:id/usage', async (request) => await getUsage(request))
 	fastify.get('/:id/usage/:containerId', async (request) => await getUsageByContainer(request))

+	fastify.get('/:id/images', async (request) => await getDockerImages(request))
+
 	fastify.post<DeployApplication>('/:id/deploy', async (request) => await deployApplication(request))
 	fastify.post<CancelDeployment>('/:id/cancel', async (request, reply) => await cancelDeployment(request, reply));

@@ -64,6 +65,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 	fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
 	fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));

+	fastify.post('/:id/configuration/registry', async (request, reply) => await saveRegistry(request, reply));
+
 	fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply));

 	fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
@@ -19,12 +19,15 @@ export interface SaveApplication extends OnlyId {
 		denoMainFile: string,
 		denoOptions: string,
 		baseImage: string,
+		gitCommitHash: string,
 		baseBuildImage: string,
 		deploymentType: string,
 		baseDatabaseBranch: string,
 		dockerComposeFile: string,
 		dockerComposeFileLocation: string,
-		dockerComposeConfiguration: string
+		dockerComposeConfiguration: string,
+		simpleDockerfile: string,
+		dockerRegistryImageName: string
 	}
 }
 export interface SaveApplicationSettings extends OnlyId {
@@ -55,7 +58,7 @@ export interface GetImages {
 	Body: { buildPack: string, deploymentType: string }
 }
 export interface SaveApplicationSource extends OnlyId {
-	Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string }
+	Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string, simpleDockerfile?: string }
 }
 export interface CheckRepository extends OnlyId {
 	Querystring: { repository: string, branch: string }
@@ -141,3 +144,11 @@ export interface RestartPreviewApplication {
 		pullmergeRequestId: string | null,
 	}
 }
+export interface RestartApplication {
+	Params: {
+		id: string,
+	},
+	Body: {
+		imageId: string | null,
+	}
+}
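With the new `RestartApplication` interface, the `POST /:id/restart` route registered above optionally takes an image to restart into. A hedged client-side sketch; the base URL, application id and token are placeholders, not values from this compare:

	// Hypothetical call against the restart route; adjust base URL and auth to your deployment.
	const response = await fetch(`${BASE_URL}/applications/${appId}/restart`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
		// imageId: null restarts the image of the currently running container;
		// a concrete id (e.g. one listed by GET /:id/images) rolls the app back or forward.
		body: JSON.stringify({ imageId: null })
	});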
@@ -1,24 +1,31 @@
 import { FastifyPluginAsync } from 'fastify';
-import { errorHandler, listSettings, version } from '../../../../lib/common';
+import { errorHandler, isARM, listSettings, version } from '../../../../lib/common';

 const root: FastifyPluginAsync = async (fastify): Promise<void> => {
+	fastify.addHook('onRequest', async (request) => {
+		try {
+			await request.jwtVerify();
+		} catch (error) {
+			return;
+		}
+	});
 	fastify.get('/', async (request) => {
 		const teamId = request.user?.teamId;
-		const settings = await listSettings()
+		const settings = await listSettings();
 		try {
 			return {
-				ipv4: teamId ? settings.ipv4 : 'nope',
-				ipv6: teamId ? settings.ipv6 : 'nope',
+				ipv4: teamId ? settings.ipv4 : null,
+				ipv6: teamId ? settings.ipv6 : null,
 				version,
 				whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
 				whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
 				isRegistrationEnabled: settings.isRegistrationEnabled,
-			}
+				isARM: isARM(process.arch)
+			};
 		} catch ({ status, message }) {
-			return errorHandler({ status, message })
+			return errorHandler({ status, message });
 		}
 	});

 };

 export default root;
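For reference, the shape this root endpoint now returns can be summarized as follows — a sketch inferred from the handler above; no such named type exists in the compare:

	// Hypothetical response type for GET on this route.
	interface BaseResponse {
		ipv4: string | null;   // null for unauthenticated requests (previously the string 'nope')
		ipv6: string | null;
		version: string;
		whiteLabeled: boolean;
		whiteLabeledIcon: string | undefined;
		isRegistrationEnabled: boolean;
		isARM: boolean;        // isARM(process.arch), e.g. true on arm64 hosts
	}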
|||||||
@@ -3,7 +3,7 @@ import type { FastifyRequest } from 'fastify';
|
|||||||
import { FastifyReply } from 'fastify';
|
import { FastifyReply } from 'fastify';
|
||||||
import yaml from 'js-yaml';
|
import yaml from 'js-yaml';
|
||||||
import fs from 'fs/promises';
|
import fs from 'fs/promises';
|
||||||
import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
|
import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeCommand, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
|
||||||
import { day } from '../../../../lib/dayjs';
|
import { day } from '../../../../lib/dayjs';
|
||||||
|
|
||||||
import type { OnlyId } from '../../../../types';
|
import type { OnlyId } from '../../../../types';
|
||||||
@@ -89,7 +89,7 @@ export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
|
|||||||
const { destinationDockerId, destinationDocker } = database;
|
const { destinationDockerId, destinationDocker } = database;
|
||||||
if (destinationDockerId) {
|
if (destinationDockerId) {
|
||||||
try {
|
try {
|
||||||
const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
|
const { stdout } = await executeCommand({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
|
||||||
|
|
||||||
if (JSON.parse(stdout).Running) {
|
if (JSON.parse(stdout).Running) {
|
||||||
isRunning = true;
|
isRunning = true;
|
||||||
@@ -208,7 +208,7 @@ export async function saveDatabaseDestination(request: FastifyRequest<SaveDataba
|
|||||||
if (destinationDockerId) {
|
if (destinationDockerId) {
|
||||||
if (type && version) {
|
if (type && version) {
|
||||||
const baseImage = getDatabaseImage(type, arch);
|
const baseImage = getDatabaseImage(type, arch);
|
||||||
executeDockerCmd({ dockerId, command: `docker pull ${baseImage}:${version}` })
|
executeCommand({ dockerId, command: `docker pull ${baseImage}:${version}` })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return reply.code(201).send({})
|
return reply.code(201).send({})
|
||||||
@@ -298,7 +298,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
|
|||||||
};
|
};
|
||||||
const composeFileDestination = `${workdir}/docker-compose.yaml`;
|
const composeFileDestination = `${workdir}/docker-compose.yaml`;
|
||||||
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
|
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
|
||||||
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
|
await executeCommand({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
|
||||||
if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
|
if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
|
||||||
return {};
|
return {};
|
||||||
|
|
||||||
@@ -347,7 +347,7 @@ export async function getDatabaseLogs(request: FastifyRequest<GetDatabaseLogs>)
|
|||||||
// const found = await checkContainer({ dockerId, container: id })
|
// const found = await checkContainer({ dockerId, container: id })
|
||||||
// if (found) {
|
// if (found) {
|
||||||
const { default: ansi } = await import('strip-ansi')
|
const { default: ansi } = await import('strip-ansi')
|
||||||
const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` })
|
const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${id}` })
|
||||||
const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
|
const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
|
||||||
const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
|
const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
|
||||||
const logs = stripLogsStderr.concat(stripLogsStdout)
|
const logs = stripLogsStderr.concat(stripLogsStdout)
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import sshConfig from 'ssh-config'
|
|||||||
import fs from 'fs/promises'
|
import fs from 'fs/promises'
|
||||||
import os from 'os';
|
import os from 'os';
|
||||||
|
|
||||||
import { asyncExecShell, createRemoteEngineConfiguration, decrypt, errorHandler, executeDockerCmd, executeSSHCmd, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
|
import { createRemoteEngineConfiguration, decrypt, errorHandler, executeCommand, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
|
||||||
import { checkContainer } from '../../../../lib/docker';
|
import { checkContainer } from '../../../../lib/docker';
|
||||||
|
|
||||||
import type { OnlyId } from '../../../../types';
|
import type { OnlyId } from '../../../../types';
|
||||||
@@ -79,9 +79,9 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
|
|||||||
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
|
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
|
||||||
if (id === 'new') {
|
if (id === 'new') {
|
||||||
if (engine) {
|
if (engine) {
|
||||||
const { stdout } = await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network ls --filter 'name=^${network}$' --format '{{json .}}'`);
|
const { stdout } = await await executeCommand({ command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'` });
|
||||||
if (stdout === '') {
|
if (stdout === '') {
|
||||||
await asyncExecShell(`DOCKER_HOST=unix:///var/run/docker.sock docker network create --attachable ${network}`);
|
await await executeCommand({ command: `docker network create --attachable ${network}` });
|
||||||
}
|
}
|
||||||
await prisma.destinationDocker.create({
|
await prisma.destinationDocker.create({
|
||||||
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
|
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
|
||||||
@@ -103,7 +103,7 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
|
|||||||
return reply.code(201).send({ id: destination.id });
|
return reply.code(201).send({ id: destination.id });
|
||||||
} else {
|
} else {
|
||||||
const destination = await prisma.destinationDocker.create({
|
const destination = await prisma.destinationDocker.create({
|
||||||
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort }
|
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort: Number(remotePort) }
|
||||||
});
|
});
|
||||||
return reply.code(201).send({ id: destination.id })
|
return reply.code(201).send({ id: destination.id })
|
||||||
}
|
}
|
||||||
@@ -122,13 +122,13 @@ export async function deleteDestination(request: FastifyRequest<OnlyId>) {
|
|||||||
const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
|
const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
|
||||||
if (isCoolifyProxyUsed) {
|
if (isCoolifyProxyUsed) {
|
||||||
if (engine || remoteVerified) {
|
if (engine || remoteVerified) {
|
||||||
const { stdout: found } = await executeDockerCmd({
|
const { stdout: found } = await executeCommand({
|
||||||
dockerId: id,
|
dockerId: id,
|
||||||
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
|
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
|
||||||
})
|
})
|
||||||
if (found) {
|
if (found) {
|
||||||
await executeDockerCmd({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
|
await executeCommand({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
|
||||||
await executeDockerCmd({ dockerId: id, command: `docker network rm ${network}` })
|
await executeCommand({ dockerId: id, command: `docker network rm ${network}` })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -203,22 +203,31 @@ export async function assignSSHKey(request: FastifyRequest) {
 }
 }
 export async function verifyRemoteDockerEngineFn(id: string) {
-await createRemoteEngineConfiguration(id);
 const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
-const host = `ssh://${remoteIpAddress}-remote`
-const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
-if (!stdout) {
-await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
-}
-const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
-if (!coolifyNetwork) {
-await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
-}
-if (isCoolifyProxyUsed) await startTraefikProxy(id);
+const daemonJson = `daemon-${id}.json`
 try {
-const { stdout: daemonJson } = await executeSSHCmd({ dockerId: id, command: `cat /etc/docker/daemon.json` });
-let daemonJsonParsed = JSON.parse(daemonJson);
+await executeCommand({ sshCommand: true, command: `docker network inspect ${network}`, dockerId: id });
+} catch (error) {
+await executeCommand({ command: `docker network create --attachable ${network}`, dockerId: id });
+}
+
+try {
+await executeCommand({ sshCommand: true, command: `docker network inspect coolify-infra`, dockerId: id });
+} catch (error) {
+await executeCommand({ command: `docker network create --attachable coolify-infra`, dockerId: id });
+}
+
+if (isCoolifyProxyUsed) await startTraefikProxy(id);
 let isUpdated = false;
+let daemonJsonParsed = {
+"live-restore": true,
+"features": {
+"buildkit": true
+}
+};
+try {
+const { stdout: daemonJson } = await executeCommand({ sshCommand: true, dockerId: id, command: `cat /etc/docker/daemon.json` });
+daemonJsonParsed = JSON.parse(daemonJson);
 if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
 isUpdated = true;
 daemonJsonParsed['live-restore'] = true
@@ -230,21 +239,19 @@ export async function verifyRemoteDockerEngineFn(id: string) {
 buildkit: true
 }
 }
-if (isUpdated) {
-await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
-await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
-}
 } catch (error) {
-const daemonJsonParsed = {
-"live-restore": true,
-"features": {
-"buildkit": true
+isUpdated = true;
 }
+try {
+if (isUpdated) {
+await executeCommand({ shell: true, command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}` })
+await executeCommand({ dockerId: id, command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json` });
+await executeCommand({ command: `rm /tmp/${daemonJson}` })
+await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
 }
-await executeSSHCmd({ dockerId: id, command: `echo '${JSON.stringify(daemonJsonParsed)}' > /etc/docker/daemon.json` });
-await executeSSHCmd({ dockerId: id, command: `systemctl restart docker` });
-} finally {
 await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
+} catch (error) {
+throw new Error('Error while verifying remote docker engine')
 }
 }
 export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
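Reviewer note: the rewritten verifyRemoteDockerEngineFn falls back to built-in defaults when /etc/docker/daemon.json is missing or unreadable, and only rewrites the file (via /tmp and scp) when a key actually changed. A sketch of that merge behaviour, under the assumption that only live-restore and features.buildkit are enforced (the helper name is hypothetical; the default values come from the hunks above):

function ensureDockerDaemonDefaults(existing: Record<string, any> | null): { config: Record<string, any>; isUpdated: boolean } {
    // Defaults as written in the diff.
    const config = existing ?? { 'live-restore': true, features: { buildkit: true } };
    let isUpdated = existing === null; // an unreadable file always triggers a rewrite
    if (config['live-restore'] !== true) {
        config['live-restore'] = true;
        isUpdated = true;
    }
    if (!config.features || config.features.buildkit !== true) {
        config.features = { ...(config.features ?? {}), buildkit: true };
        isUpdated = true;
    }
    return { config, isUpdated };
}

// ensureDockerDaemonDefaults(null) -> rewrite with defaults and restart docker
// ensureDockerDaemonDefaults({ 'live-restore': true, features: { buildkit: true } }) -> no rewrite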
@@ -4,7 +4,6 @@ import bcrypt from "bcryptjs";
 import fs from 'fs/promises';
 import yaml from 'js-yaml';
 import {
-asyncExecShell,
 asyncSleep,
 cleanupDockerStorage,
 errorHandler,
@@ -13,6 +12,8 @@ import {
 prisma,
 uniqueName,
 version,
+sentryDSN,
+executeCommand,
 } from "../../../lib/common";
 import { scheduler } from "../../../lib/scheduler";
 import type { FastifyReply, FastifyRequest } from "fastify";
@@ -24,6 +25,35 @@ export async function hashPassword(password: string): Promise<string> {
 return bcrypt.hash(password, saltRounds);
 }
 
+export async function backup(request: FastifyRequest) {
+try {
+const { backupData } = request.params;
+let std = null;
+const [id, backupType, type, zipped, storage] = backupData.split(':')
+console.log(id, backupType, type, zipped, storage)
+const database = await prisma.database.findUnique({ where: { id } })
+if (database) {
+// await executeDockerCmd({
+// dockerId: database.destinationDockerId,
+// command: `docker pull coollabsio/backup:latest`,
+// })
+std = await executeCommand({
+dockerId: database.destinationDockerId,
+command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
+})
+
+}
+if (std.stdout) {
+return std.stdout;
+}
+if (std.stderr) {
+return std.stderr;
+}
+return 'nope';
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
+}
 export async function cleanupManually(request: FastifyRequest) {
 try {
 const { serverId } = request.body;
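Reviewer note: the new backup handler packs five fields into a single backupData path segment and splits on ':'. The field order comes straight from the destructuring above; the concrete values here are made-up placeholders. (Worth flagging: std stays null when no database matches the id, so the std.stdout check would throw — presumably tolerable while the route itself is still commented out.)

// id : backupType : type : zipped : storage
const backupData = 'clxyz123:database:postgresql:true:local';
const [id, backupType, type, zipped, storage] = backupData.split(':');
// id === 'clxyz123', backupType === 'database', type === 'postgresql',
// zipped === 'true', storage === 'local' (all strings; the handler does no coercion)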
@@ -110,14 +140,10 @@ export async function update(request: FastifyRequest<Update>) {
 try {
 if (!isDev) {
 const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
-await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
-await asyncExecShell(`env | grep COOLIFY > .env`);
-await asyncExecShell(
-`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
-);
-await asyncExecShell(
-`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
-);
+await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
+await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
+await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` });
+await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` });
 return {};
 } else {
 await asyncSleep(2000);
@@ -146,7 +172,7 @@ export async function restartCoolify(request: FastifyRequest<any>) {
 const teamId = request.user.teamId;
 if (teamId === "0") {
 if (!isDev) {
-asyncExecShell(`docker restart coolify`);
+await executeCommand({ command: `docker restart coolify` });
 return {};
 } else {
 return {};
@@ -189,7 +215,7 @@ export async function showDashboard(request: FastifyRequest) {
 
 let foundUnconfiguredApplication = false;
 for (const application of applications) {
-if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
+if (((!application.buildPack || !application.branch) && !application.simpleDockerfile) || !application.destinationDockerId || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
 foundUnconfiguredApplication = true
 }
 }
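Reviewer note: the rewritten dashboard predicate is dense. Unpacked, an application now counts as unconfigured when it lacks both a build pipeline (buildPack plus branch) and a raw Dockerfile, or lacks a destination, or is a non-bot app without an FQDN outside of compose. A hedged restatement with the boolean precedence made explicit (the helper name and interface are mine, not the codebase's):

interface ApplicationLike {
    buildPack?: string | null;
    branch?: string | null;
    simpleDockerfile?: string | null;
    destinationDockerId?: string | null;
    fqdn?: string | null;
    settings?: { isBot?: boolean } | null;
}

function isUnconfigured(application: ApplicationLike): boolean {
    // Neither a buildPack+branch pipeline nor a plain Dockerfile.
    const missingBuildSource =
        (!application.buildPack || !application.branch) && !application.simpleDockerfile;
    const missingDestination = !application.destinationDockerId;
    // && binds tighter than || in the original, so the compose check only guards this term.
    const missingFqdn =
        !application.settings?.isBot && !application?.fqdn && application.buildPack !== 'compose';
    return missingBuildSource || missingDestination || missingFqdn;
}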
@@ -398,7 +424,8 @@ export async function getCurrentUser(
 }
 const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } })
 return {
-settings: await prisma.setting.findFirst(),
+settings: await prisma.setting.findUnique({ where: { id: "0" } }),
+sentryDSN,
 pendingInvitations,
 token,
 ...request.user,
@@ -1,5 +1,5 @@
 import { FastifyPluginAsync } from 'fastify';
-import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
+import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify, backup } from './handlers';
 import { GetCurrentUser } from './types';
 
 export interface Update {
@@ -52,6 +52,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 fastify.post('/internal/cleanup', {
 onRequest: [fastify.authenticate]
 }, async (request) => await cleanupManually(request));
+
+// fastify.get('/internal/backup/:backupData', {
+// onRequest: [fastify.authenticate]
+// }, async (request) => await backup(request));
 };
 
 export default root;
@@ -1,5 +1,5 @@
 import type { FastifyRequest } from 'fastify';
-import { errorHandler, executeDockerCmd, prisma, createRemoteEngineConfiguration, executeSSHCmd } from '../../../../lib/common';
+import { errorHandler, prisma, executeCommand } from '../../../../lib/common';
 import os from 'node:os';
 import osu from 'node-os-utils';
 
@@ -71,10 +71,10 @@ export async function showUsage(request: FastifyRequest) {
 let { remoteEngine } = request.query
 remoteEngine = remoteEngine === 'true' ? true : false
 if (remoteEngine) {
-const { stdout: stats } = await executeSSHCmd({ dockerId: id, command: `vmstat -s` })
-const { stdout: disks } = await executeSSHCmd({ dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
-const { stdout: cpus } = await executeSSHCmd({ dockerId: id, command: `nproc --all` })
-const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
+const { stdout: stats } = await executeCommand({ sshCommand: true, dockerId: id, command: `vmstat -s` })
+const { stdout: disks } = await executeCommand({ sshCommand: true, shell: true, dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
+const { stdout: cpus } = await executeCommand({ sshCommand: true, dockerId: id, command: `nproc --all` })
+const { stdout: cpuUsage } = await executeCommand({ sshCommand: true, shell: true, dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
 const parsed: any = parseFromText(stats)
 return {
 usage: {
@@ -4,7 +4,7 @@ import yaml from 'js-yaml';
 import bcrypt from 'bcryptjs';
 import cuid from 'cuid';
 
-import { prisma, uniqueName, asyncExecShell, getServiceFromDB, getContainerUsage, isDomainConfigured, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, checkExposedPort, listSettings } from '../../../../lib/common';
+import { prisma, uniqueName, getServiceFromDB, getContainerUsage, isDomainConfigured, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, checkDomainsIsValidInDNS, checkExposedPort, listSettings, generateToken, executeCommand } from '../../../../lib/common';
 import { day } from '../../../../lib/dayjs';
 import { checkContainer, } from '../../../../lib/docker';
 import { removeService } from '../../../../lib/services/common';
@@ -48,14 +48,19 @@ export async function cleanupUnconfiguredServices(request: FastifyRequest) {
 for (const service of services) {
 if (!service.fqdn) {
 if (service.destinationDockerId) {
-await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: service.destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
-})
-await executeDockerCmd({
-dockerId: service.destinationDockerId,
-command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
+command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}`
 })
+if (containers) {
+const containerArray = containers.split('\n');
+if (containerArray.length > 0) {
+for (const container of containerArray) {
+await executeCommand({ dockerId: service.destinationDockerId, command: `docker stop -t 0 ${container}` })
+await executeCommand({ dockerId: service.destinationDockerId, command: `docker rm --force ${container}` })
+}
+}
+}
 }
 await removeService({ id: service.id });
 }
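Design note: the old version piped docker ps output through xargs inside one shell command; the new version fetches the IDs first and issues one stop/rm per container. That keeps every call shell-free at the cost of extra round-trips. One caveat worth flagging in review: splitting output that ends with a trailing newline yields an empty final element, so a defensive variant would filter it, e.g.:

const containerArray = containers.split('\n').filter((id) => id.trim().length > 0);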
@@ -73,23 +78,27 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
 const { destinationDockerId, settings } = service;
 let payload = {}
 if (destinationDockerId) {
-const { stdout: containers } = await executeDockerCmd({
+const { stdout: containers } = await executeCommand({
 dockerId: service.destinationDocker.id,
 command:
 `docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'`
 });
+if (containers) {
 const containersArray = containers.trim().split('\n');
 if (containersArray.length > 0 && containersArray[0] !== '') {
 const templates = await getTemplates();
 let template = templates.find(t => t.type === service.type);
-template = JSON.parse(JSON.stringify(template).replaceAll('$$id', service.id));
+const templateStr = JSON.stringify(template)
+if (templateStr) {
+template = JSON.parse(templateStr.replaceAll('$$id', service.id));
+}
 for (const container of containersArray) {
 let isRunning = false;
 let isExited = false;
 let isRestarting = false;
 let isExcluded = false;
 const containerObj = JSON.parse(container);
-const exclude = template.services[containerObj.Names]?.exclude;
+const exclude = template?.services[containerObj.Names]?.exclude;
 if (exclude) {
 payload[containerObj.Names] = {
 status: {
@@ -123,6 +132,8 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
 }
 }
 }
+
+}
 return payload
 } catch ({ status, message }) {
 return errorHandler({ status, message })
@@ -149,18 +160,24 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
 }
 }
 parsedTemplate[realKey] = {
+value,
 name,
 documentation: value.documentation || foundTemplate.documentation || 'https://docs.coollabs.io',
 image: value.image,
+files: value?.files,
 environment: [],
 fqdns: [],
+hostPorts: [],
 proxy: {}
 }
 if (value.environment?.length > 0) {
 for (const env of value.environment) {
 let [envKey, ...envValue] = env.split('=')
 envValue = envValue.join("=")
-const variable = foundTemplate.variables.find(v => v.name === envKey) || foundTemplate.variables.find(v => v.id === envValue)
+let variable = null
+if (foundTemplate?.variables) {
+variable = foundTemplate?.variables.find(v => v.name === envKey) || foundTemplate?.variables.find(v => v.id === envValue)
+}
 if (variable) {
 const id = variable.id.replaceAll('$$', '')
 const label = variable?.label
@@ -186,7 +203,7 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
 if (value?.proxy && value.proxy.length > 0) {
 for (const proxyValue of value.proxy) {
 if (proxyValue.domain) {
-const variable = foundTemplate.variables.find(v => v.id === proxyValue.domain)
+const variable = foundTemplate?.variables.find(v => v.id === proxyValue.domain)
 if (variable) {
 const { id, name, label, description, defaultValue, required = false } = variable
 const found = await prisma.serviceSetting.findFirst({ where: { serviceId: service.id, variableName: proxyValue.domain } })
@@ -194,7 +211,16 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
 { id, name, value: found?.value || '', label, description, defaultValue, required }
 )
 }
+}
+if (proxyValue.hostPort) {
+const variable = foundTemplate?.variables.find(v => v.id === proxyValue.hostPort)
+if (variable) {
+const { id, name, label, description, defaultValue, required = false } = variable
+const found = await prisma.serviceSetting.findFirst({ where: { serviceId: service.id, variableName: proxyValue.hostPort } })
+parsedTemplate[realKey].hostPorts.push(
+{ id, name, value: found?.value || '', label, description, defaultValue, required }
+)
+}
 }
 }
 }
@@ -208,7 +234,7 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
 strParsedTemplate = strParsedTemplate.replaceAll('$$id', service.id)
 strParsedTemplate = strParsedTemplate.replaceAll('$$core_version', service.version || foundTemplate.defaultVersion)
 
-// replace $$fqdn
+// replace $$workdir
 if (workdir) {
 strParsedTemplate = strParsedTemplate.replaceAll('$$workdir', workdir)
 }
@@ -217,15 +243,17 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
 if (service.serviceSetting.length > 0) {
 for (const setting of service.serviceSetting) {
 const { value, variableName } = setting
-const regex = new RegExp(`\\$\\$config_${variableName.replace('$$config_', '')}\\"`, 'gi')
+const regex = new RegExp(`\\$\\$config_${variableName.replace('$$config_', '')}\"`, 'gi')
 if (value === '$$generate_fqdn') {
-strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + "\"" || '' + "\"")
+strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '"' || '' + '"')
+} else if (value === '$$generate_fqdn_slash') {
+strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '/' + '"')
 } else if (value === '$$generate_domain') {
-strParsedTemplate = strParsedTemplate.replaceAll(regex, getDomain(service.fqdn) + "\"")
+strParsedTemplate = strParsedTemplate.replaceAll(regex, getDomain(service.fqdn) + '"')
 } else if (service.destinationDocker?.network && value === '$$generate_network') {
-strParsedTemplate = strParsedTemplate.replaceAll(regex, service.destinationDocker.network + "\"")
+strParsedTemplate = strParsedTemplate.replaceAll(regex, service.destinationDocker.network + '"')
 } else {
-strParsedTemplate = strParsedTemplate.replaceAll(regex, value + "\"")
+strParsedTemplate = strParsedTemplate.replaceAll(regex, value + '"')
 }
 }
 }
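Reviewer note: to make the new $$generate_fqdn_slash branch concrete, here is a worked example of how one $$config_ placeholder is rewritten. The template fragment and service values are invented for illustration; the regex construction and replacement logic mirror the hunk above:

const service = { fqdn: 'https://n8n.example.com' };
let strParsedTemplate = '"WEBHOOK_URL":"$$config_webhook_url"';
const variableName = '$$config_webhook_url';
const value = '$$generate_fqdn_slash';

const regex = new RegExp(`\\$\\$config_${variableName.replace('$$config_', '')}"`, 'gi');
if (value === '$$generate_fqdn_slash') {
    // The pattern matches `$$config_webhook_url"` including the closing quote,
    // so the replacement re-appends the quote after the trailing slash.
    strParsedTemplate = strParsedTemplate.replaceAll(regex, service.fqdn + '/' + '"');
}
// strParsedTemplate === '"WEBHOOK_URL":"https://n8n.example.com/"'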
@@ -233,15 +261,16 @@ export async function parseAndFindServiceTemplates(service: any, workdir?: strin
 // replace $$secret
 if (service.serviceSecret.length > 0) {
 for (const secret of service.serviceSecret) {
-const { name, value } = secret
-const regexHashed = new RegExp(`\\$\\$hashed\\$\\$secret_${name}\\"`, 'gi')
-const regex = new RegExp(`\\$\\$secret_${name}\\"`, 'gi')
+let { name, value } = secret
+name = name.toLowerCase()
+const regexHashed = new RegExp(`\\$\\$hashed\\$\\$secret_${name}\"`, 'gi')
+const regex = new RegExp(`\\$\\$secret_${name}\"`, 'gi')
 if (value) {
-strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, bcrypt.hashSync(value.replaceAll("\"", "\\\""), 10) + "\"")
-strParsedTemplate = strParsedTemplate.replaceAll(regex, value.replaceAll("\"", "\\\"") + "\"")
+strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, bcrypt.hashSync(value.replaceAll("\"", "\\\""), 10) + '"')
+strParsedTemplate = strParsedTemplate.replaceAll(regex, value.replaceAll("\"", "\\\"") + '"')
 } else {
-strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, "\"")
-strParsedTemplate = strParsedTemplate.replaceAll(regex, "\"")
+strParsedTemplate = strParsedTemplate.replaceAll(regexHashed, '' + '"')
+strParsedTemplate = strParsedTemplate.replaceAll(regex, '' + '"')
 }
 }
 }
@@ -291,6 +320,7 @@ export async function saveServiceType(request: FastifyRequest<SaveServiceType>,
 let foundTemplate = templates.find(t => fixType(t.type) === fixType(type))
 if (foundTemplate) {
 foundTemplate = JSON.parse(JSON.stringify(foundTemplate).replaceAll('$$id', id))
+if (foundTemplate.variables) {
 if (foundTemplate.variables.length > 0) {
 for (const variable of foundTemplate.variables) {
 const { defaultValue } = variable;
@@ -302,6 +332,8 @@ export async function saveServiceType(request: FastifyRequest<SaveServiceType>,
 variable.value = generatePassword({ length, isHex: true });
 } else if (variable.defaultValue.startsWith('$$generate_username')) {
 variable.value = cuid();
+} else if (variable.defaultValue.startsWith('$$generate_token')) {
+variable.value = generateToken()
 } else {
 variable.value = variable.defaultValue || '';
 }
@@ -330,6 +362,7 @@ export async function saveServiceType(request: FastifyRequest<SaveServiceType>,
 }
 }
 }
+}
 for (const service of Object.keys(foundTemplate.services)) {
 if (foundTemplate.services[service].volumes) {
 for (const volume of foundTemplate.services[service].volumes) {
@@ -418,7 +451,7 @@ export async function getServiceLogs(request: FastifyRequest<GetServiceLogs>) {
 if (destinationDockerId) {
 try {
 const { default: ansi } = await import('strip-ansi')
-const { stdout, stderr } = await executeDockerCmd({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
+const { stdout, stderr } = await executeCommand({ dockerId, command: `docker logs --since ${since} --tail 5000 --timestamps ${containerId}` })
 const stripLogsStdout = stdout.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
 const stripLogsStderr = stderr.toString().split('\n').map((l) => ansi(l)).filter((a) => a);
 const logs = stripLogsStderr.concat(stripLogsStdout)
@@ -532,7 +565,7 @@ export async function saveService(request: FastifyRequest<SaveService>, reply: F
 }
 if (isNew) {
 if (!variableName) {
-variableName = foundTemplate.variables.find(v => v.name === name).id
+variableName = foundTemplate?.variables.find(v => v.name === name).id
 }
 await prisma.serviceSetting.create({ data: { name, value, variableName, service: { connect: { id } } } })
 }
@@ -724,7 +757,7 @@ export async function activatePlausibleUsers(request: FastifyRequest<OnlyId>, re
 if (destinationDockerId) {
 const databaseUrl = serviceSecret.find((secret) => secret.name === 'DATABASE_URL');
 if (databaseUrl) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
 command: `docker exec ${id}-postgresql psql -H ${databaseUrl.value} -c "UPDATE users SET email_verified = true;"`
 })
@@ -745,9 +778,10 @@ export async function cleanupPlausibleLogs(request: FastifyRequest<OnlyId>, repl
 destinationDocker,
 } = await getServiceFromDB({ id, teamId });
 if (destinationDockerId) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
-command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`
+command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`,
+shell: true
 })
 return await reply.code(201).send()
 }
@@ -787,36 +821,42 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
 if (user) ftpUser = user;
 if (savedPassword) ftpPassword = decrypt(savedPassword);
 
-const { stdout: password } = await asyncExecShell(
-`echo ${ftpPassword} | openssl passwd -1 -stdin`
+// TODO: rewrite these to usable without shell
+const { stdout: password } = await executeCommand({
+command:
+`echo ${ftpPassword} | openssl passwd -1 -stdin`,
+shell: true
+}
 );
 if (destinationDockerId) {
 try {
 await fs.stat(hostkeyDir);
 } catch (error) {
-await asyncExecShell(`mkdir -p ${hostkeyDir}`);
+await executeCommand({ command: `mkdir -p ${hostkeyDir}` });
 }
 if (!ftpHostKey) {
-await asyncExecShell(
+await executeCommand({
+command:
 `ssh-keygen -t ed25519 -f ssh_host_ed25519_key -N "" -q -f ${hostkeyDir}/${id}.ed25519`
+}
 );
-const { stdout: ftpHostKey } = await asyncExecShell(`cat ${hostkeyDir}/${id}.ed25519`);
+const { stdout: ftpHostKey } = await executeCommand({ command: `cat ${hostkeyDir}/${id}.ed25519` });
 await prisma.wordpress.update({
 where: { serviceId: id },
 data: { ftpHostKey: encrypt(ftpHostKey) }
 });
 } else {
-await asyncExecShell(`echo "${decrypt(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`);
+await executeCommand({ command: `echo "${decrypt(ftpHostKey)}" > ${hostkeyDir}/${id}.ed25519`, shell: true });
 }
 if (!ftpHostKeyPrivate) {
-await asyncExecShell(`ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa`);
-const { stdout: ftpHostKeyPrivate } = await asyncExecShell(`cat ${hostkeyDir}/${id}.rsa`);
+await executeCommand({ command: `ssh-keygen -t rsa -b 4096 -N "" -f ${hostkeyDir}/${id}.rsa` });
+const { stdout: ftpHostKeyPrivate } = await executeCommand({ command: `cat ${hostkeyDir}/${id}.rsa` });
 await prisma.wordpress.update({
 where: { serviceId: id },
 data: { ftpHostKeyPrivate: encrypt(ftpHostKeyPrivate) }
 });
 } else {
-await asyncExecShell(`echo "${decrypt(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`);
+await executeCommand({ command: `echo "${decrypt(ftpHostKeyPrivate)}" > ${hostkeyDir}/${id}.rsa`, shell: true });
 }
 
 await prisma.wordpress.update({
@@ -831,9 +871,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
 try {
 const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
 if (isRunning) {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
-command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`
+command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
+shell: true
 })
 }
 } catch (error) { }
@@ -877,9 +918,9 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
 `${hostkeyDir}/${id}.sh`,
 `#!/bin/bash\nchmod 600 /etc/ssh/ssh_host_ed25519_key /etc/ssh/ssh_host_rsa_key\nuserdel -f xfs\nchown -R 33:33 /home/${ftpUser}/wordpress/`
 );
-await asyncExecShell(`chmod +x ${hostkeyDir}/${id}.sh`);
+await executeCommand({ command: `chmod +x ${hostkeyDir}/${id}.sh` });
 await fs.writeFile(`${hostkeyDir}/${id}-docker-compose.yml`, yaml.dump(compose));
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
 command: `docker compose -f ${hostkeyDir}/${id}-docker-compose.yml up -d`
 })
@@ -896,9 +937,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
 data: { ftpPublicPort: null }
 });
 try {
-await executeDockerCmd({
+await executeCommand({
 dockerId: destinationDocker.id,
-command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`
+command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`,
+shell: true
 })
 
 } catch (error) {
@@ -912,8 +954,10 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
 return errorHandler({ status, message })
 } finally {
 try {
-await asyncExecShell(
+await executeCommand({
+command:
 `rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
+}
 );
 } catch (error) { }
 
@@ -1,9 +1,9 @@
 import { promises as dns } from 'dns';
 import { X509Certificate } from 'node:crypto';
+import * as Sentry from '@sentry/node';
 import type { FastifyReply, FastifyRequest } from 'fastify';
-import { asyncExecShell, checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, isDev, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common';
+import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, executeCommand, getDomain, isDev, isDNSValid, isDomainConfigured, listSettings, prisma, sentryDSN, version } from '../../../../lib/common';
-import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
+import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types';
 
 
 export async function listAllSettings(request: FastifyRequest) {
@@ -11,6 +11,13 @@ export async function listAllSettings(request: FastifyRequest) {
 const teamId = request.user.teamId;
 const settings = await listSettings();
 const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } })
+let registries = await prisma.dockerRegistry.findMany({ where: { team: { id: teamId } } })
+registries = registries.map((registry) => {
+if (registry.password) {
+registry.password = decrypt(registry.password)
+}
+return registry
+})
 const unencryptedKeys = []
 if (sshKeys.length > 0) {
 for (const key of sshKeys) {
@@ -27,7 +34,8 @@ export async function listAllSettings(request: FastifyRequest) {
 return {
 settings,
 certificates: cns,
-sshKeys: unencryptedKeys
+sshKeys: unencryptedKeys,
+registries
 }
 } catch ({ status, message }) {
 return errorHandler({ status, message })
@@ -35,7 +43,10 @@ export async function listAllSettings(request: FastifyRequest) {
 }
 export async function saveSettings(request: FastifyRequest<SaveSettings>, reply: FastifyReply) {
 try {
-const {
+let {
+previewSeparator,
+numberOfDockerImagesKeptLocally,
+doNotTrack,
 fqdn,
 isAPIDebuggingEnabled,
 isRegistrationEnabled,
@@ -47,10 +58,29 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
 DNSServers,
 proxyDefaultRedirect
 } = request.body
-const { id } = await listSettings();
+const { id, previewSeparator: SetPreviewSeparator } = await listSettings();
+if (numberOfDockerImagesKeptLocally) {
+numberOfDockerImagesKeptLocally = Number(numberOfDockerImagesKeptLocally)
+}
+if (previewSeparator == '') {
+previewSeparator = '.'
+}
+if (SetPreviewSeparator != previewSeparator) {
+const applications = await prisma.application.findMany({ where: { previewApplication: { some: { id: { not: undefined } } } }, include: { previewApplication: true } })
+for (const application of applications) {
+for (const preview of application.previewApplication) {
+const { protocol } = new URL(preview.customDomain)
+const { pullmergeRequestId } = preview
+const { fqdn } = application
+const newPreviewDomain = `${protocol}//${pullmergeRequestId}${previewSeparator}${getDomain(fqdn)}`
+await prisma.previewApplication.update({ where: { id: preview.id }, data: { customDomain: newPreviewDomain } })
+}
+}
+}
+
 await prisma.setting.update({
 where: { id },
-data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled, }
+data: { previewSeparator, numberOfDockerImagesKeptLocally, doNotTrack, isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled }
 });
 if (fqdn) {
 await prisma.setting.update({ where: { id }, data: { fqdn } });
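Reviewer note: the migration loop above rebuilds every stored preview URL when the separator setting changes. A worked example of the string assembly (values invented; getDomain is assumed to strip the protocol, which matches how it is used throughout these handlers):

const previewSeparator = '-';
const preview = { customDomain: 'https://42.app.example.com' }; // stored with the old '.' separator
const application = { fqdn: 'https://app.example.com' };

// Hypothetical stand-in for the real getDomain from lib/common.
const getDomain = (fqdn: string): string => fqdn.replace(/^https?:\/\//, '');

const { protocol } = new URL(preview.customDomain); // 'https:' (includes the colon)
const pullmergeRequestId = '42';
const newPreviewDomain = `${protocol}//${pullmergeRequestId}${previewSeparator}${getDomain(application.fqdn)}`;
// newPreviewDomain === 'https://42-app.example.com'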
@@ -59,6 +89,14 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
 if (minPort && maxPort) {
 await prisma.setting.update({ where: { id }, data: { minPort, maxPort } });
 }
+if (doNotTrack === false) {
+// Sentry.init({
+// dsn: sentryDSN,
+// environment: isDev ? 'development' : 'production',
+// release: version
+// });
+// console.log('Sentry initialized')
+}
 return reply.code(201).send()
 } catch ({ status, message }) {
 return errorHandler({ status, message })
@@ -91,7 +129,7 @@ export async function checkDomain(request: FastifyRequest<CheckDomain>) {
 if (fqdn) fqdn = fqdn.toLowerCase();
 const found = await isDomainConfigured({ id, fqdn });
 if (found) {
-throw "Domain already configured";
+throw { message: "Domain already configured" };
 }
 if (isDNSCheckEnabled && !forceSave && !isDev) {
 const hostname = request.hostname.split(':')[0]
@@ -131,8 +169,9 @@ export async function saveSSHKey(request: FastifyRequest<SaveSSHKey>, reply: Fas
 }
 export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
 try {
+const teamId = request.user.teamId;
 const { id } = request.body;
-await prisma.sshKey.delete({ where: { id } })
+await prisma.sshKey.deleteMany({ where: { id, teamId } })
 return reply.code(201).send()
 } catch ({ status, message }) {
 return errorHandler({ status, message })
@@ -141,9 +180,54 @@ export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply:
 
 export async function deleteCertificates(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
 try {
+const teamId = request.user.teamId;
 const { id } = request.body;
-await asyncExecShell(`docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`)
-await prisma.certificate.delete({ where: { id } })
+await executeCommand({ command: `docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`, shell: true })
+await prisma.certificate.deleteMany({ where: { id, teamId } })
+return reply.code(201).send()
+} catch ({ status, message }) {
+return errorHandler({ status, message })
+}
+}
+
+export async function setDockerRegistry(request: FastifyRequest<SetDefaultRegistry>, reply: FastifyReply) {
+try {
+const teamId = request.user.teamId;
+const { id, username, password } = request.body;
+
+let encryptedPassword = ''
+if (password) encryptedPassword = encrypt(password)
+
+if (teamId === '0') {
+await prisma.dockerRegistry.update({ where: { id }, data: { username, password: encryptedPassword } })
+} else {
+await prisma.dockerRegistry.updateMany({ where: { id, teamId }, data: { username, password: encryptedPassword } })
+}
+return reply.code(201).send()
+} catch ({ status, message }) {
+return errorHandler({ status, message })
+}
+}
+export async function addDockerRegistry(request: FastifyRequest<AddDefaultRegistry>, reply: FastifyReply) {
+try {
+const teamId = request.user.teamId;
+const { name, url, username, password } = request.body;
+
+let encryptedPassword = ''
+if (password) encryptedPassword = encrypt(password)
+await prisma.dockerRegistry.create({ data: { name, url, username, password: encryptedPassword, team: { connect: { id: teamId } } } })
+
+return reply.code(201).send()
+} catch ({ status, message }) {
+return errorHandler({ status, message })
+}
+}
+export async function deleteDockerRegistry(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
+try {
+const teamId = request.user.teamId;
+const { id } = request.body;
+await prisma.application.updateMany({ where: { dockerRegistryId: id }, data: { dockerRegistryId: null } })
+await prisma.dockerRegistry.deleteMany({ where: { id, teamId } })
 return reply.code(201).send()
 } catch ({ status, message }) {
 return errorHandler({ status, message })
@@ -2,8 +2,8 @@ import { FastifyPluginAsync } from 'fastify';
 import { X509Certificate } from 'node:crypto';
 
 import { encrypt, errorHandler, prisma } from '../../../../lib/common';
-import { checkDNS, checkDomain, deleteCertificates, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers';
+import { addDockerRegistry, checkDNS, checkDomain, deleteCertificates, deleteDockerRegistry, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey, setDockerRegistry } from './handlers';
-import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
+import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types';
 
 
 const root: FastifyPluginAsync = async (fastify): Promise<void> => {
@@ -20,6 +20,10 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply));
 fastify.delete<OnlyIdInBody>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply));
+
+fastify.post<SetDefaultRegistry>('/registry', async (request, reply) => await setDockerRegistry(request, reply));
+fastify.post<AddDefaultRegistry>('/registry/new', async (request, reply) => await addDockerRegistry(request, reply));
+fastify.delete<OnlyIdInBody>('/registry', async (request, reply) => await deleteDockerRegistry(request, reply));
 
 fastify.post('/upload', async (request) => {
 try {
 const teamId = request.user.teamId;
@@ -53,7 +57,6 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
 
 });
 fastify.delete<OnlyIdInBody>('/certificate', async (request, reply) => await deleteCertificates(request, reply))
-// fastify.get('/certificates', async (request) => await getCertificates(request))
 };
 
 export default root;
@@ -2,6 +2,9 @@ import { OnlyId } from "../../../../types"
 
 export interface SaveSettings {
 Body: {
+previewSeparator: string,
+numberOfDockerImagesKeptLocally: number,
+doNotTrack: boolean,
 fqdn: string,
 isAPIDebuggingEnabled: boolean,
 isRegistrationEnabled: boolean,
@@ -48,3 +51,19 @@ export interface OnlyIdInBody {
 id: string
 }
 }
+
+export interface SetDefaultRegistry {
+Body: {
+id: string
+username: string
+password: string
+}
+}
+export interface AddDefaultRegistry {
+Body: {
+url: string
+name: string
+username: string
+password: string
+}
+}
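Reviewer note: with the interfaces above and the routes registered in the settings plugin ('/registry', '/registry/new'), adding and then updating a registry from a client would look roughly like this. The host, mount point and token are placeholders, not values from the diff:

const base = 'http://localhost:3000/api/v1/settings'; // assumed mount point
const headers = { 'Content-Type': 'application/json', Authorization: 'Bearer <token>' };

// AddDefaultRegistry body: url, name, username, password
await fetch(`${base}/registry/new`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ url: 'ghcr.io', name: 'my-registry', username: 'me', password: 's3cret' })
});

// SetDefaultRegistry body: id, username, password
await fetch(`${base}/registry`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ id: 'clreg123', username: 'me', password: 'rotated' })
});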
@@ -37,9 +37,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
 try {
 const { id } = request.params
 const { teamId } = request.user
 
 const settings = await prisma.setting.findFirst({});
-if (settings.proxyPassword) settings.proxyPassword = decrypt(settings.proxyPassword);
-
 if (id === 'new') {
 return {
@@ -71,7 +71,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
 const githubEvent = request.headers['x-github-event']?.toString().toLowerCase();
 const githubSignature = request.headers['x-hub-signature-256']?.toString().toLowerCase();
 if (!allowedGithubEvents.includes(githubEvent)) {
-throw { status: 500, message: 'Event not allowed.' }
+throw { status: 500, message: 'Event not allowed.', type: 'webhook' }
 }
 if (githubEvent === 'ping') {
 return { pong: 'cool' }
@@ -89,9 +89,10 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
 branch = body.pull_request.base.ref
 }
 if (!projectId || !branch) {
-throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!' }
+throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!', type: 'webhook' }
 }
 const applicationsFound = await getApplicationFromDBWebhook(projectId, branch);
+const settings = await prisma.setting.findUnique({ where: { id: '0' } });
 if (applicationsFound && applicationsFound.length > 0) {
 for (const application of applicationsFound) {
 const buildId = cuid();
@@ -106,7 +107,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
|
|||||||
const checksum = Buffer.from(githubSignature, 'utf8');
|
const checksum = Buffer.from(githubSignature, 'utf8');
|
||||||
//@ts-ignore
|
//@ts-ignore
|
||||||
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
||||||
throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?' }
|
throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?', type: 'webhook' }
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -156,7 +157,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
			const sourceBranch = body.pull_request.head.ref
			const sourceRepository = body.pull_request.head.repo.full_name
			if (!allowedActions.includes(pullmergeRequestAction)) {
-				throw { status: 500, message: 'Action not allowed.' }
+				throw { status: 500, message: 'Action not allowed.', type: 'webhook' }
			}

			if (application.settings.previews) {
@@ -168,7 +169,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
					}
				);
				if (!isRunning) {
-					throw { status: 500, message: 'Application not running.' }
+					throw { status: 500, message: 'Application not running.', type: 'webhook' }
				}
			}
			if (
@@ -192,7 +193,7 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
				data: {
					pullmergeRequestId,
					sourceBranch,
-					customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
+					customDomain: `${protocol}${pullmergeRequestId}${settings.previewSeparator}${getDomain(application.fqdn)}`,
					application: { connect: { id: application.id } }
				}
			})
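This hunk replaces the hardcoded `.` between the PR number and the application domain with a `previewSeparator` read from the instance settings. The resulting URL shape, as a sketch with illustrative values:

    // Illustrative values only; in the handler these come from the webhook body and Prisma.
    const protocol = 'https://';
    const pullmergeRequestId = '42';
    const previewSeparator = '.';          // settings.previewSeparator
    const domain = 'app.example.com';      // getDomain(application.fqdn)
    const customDomain = `${protocol}${pullmergeRequestId}${previewSeparator}${domain}`;
    // => 'https://42.app.example.com'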
@@ -257,8 +258,8 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
			}
		}
	}
-} catch ({ status, message }) {
-	return errorHandler({ status, message })
+} catch ({ status, message, type }) {
+	return errorHandler({ status, message, type })
	}

}
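Every `throw` in these handlers now carries `type: 'webhook'`, and the catch block forwards it to `errorHandler`. A sketch of a handler that branches on it; the branching shown is an assumption, not the actual lib/common implementation:

    // Hypothetical shape: webhook rejections (bad token, draft MR, disallowed event)
    // are routine traffic, so they could be returned without extra logging.
    function errorHandler({ status = 500, message = 'Unknown error.', type = null }:
        { status?: number; message?: string; type?: string | null }) {
        if (type !== 'webhook') {
            console.error(message);
        }
        return { status, message };
    }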
@@ -44,8 +44,9 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
	const allowedActions = ['opened', 'reopen', 'close', 'open', 'update'];
	const webhookToken = request.headers['x-gitlab-token'];
	if (!webhookToken && !isDev) {
-		throw { status: 500, message: 'Invalid webhookToken.' }
+		throw { status: 500, message: 'Invalid webhookToken.', type: 'webhook' }
	}
+	const settings = await prisma.setting.findUnique({ where: { id: '0' } });
	if (objectKind === 'push') {
		const projectId = Number(project_id);
		const branch = ref.split('/')[2];
@@ -95,10 +96,10 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
		const pullmergeRequestId = request.body.object_attributes.iid.toString();
		const projectId = Number(id);
		if (!allowedActions.includes(action)) {
-			throw { status: 500, message: 'Action not allowed.' }
+			throw { status: 500, message: 'Action not allowed.', type: 'webhook' }
		}
		if (isDraft) {
-			throw { status: 500, message: 'Draft MR, do nothing.' }
+			throw { status: 500, message: 'Draft MR, do nothing.', type: 'webhook' }
		}
		const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch);
		if (applicationsFound && applicationsFound.length > 0) {
@@ -113,11 +114,11 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
					}
				);
				if (!isRunning) {
-					throw { status: 500, message: 'Application not running.' }
+					throw { status: 500, message: 'Application not running.', type: 'webhook' }
				}
			}
			if (!isDev && application.gitSource.gitlabApp.webhookToken !== webhookToken) {
-				throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!' }
+				throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!', type: 'webhook' }
			}
			if (
				action === 'opened' ||
@@ -140,7 +141,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
				data: {
					pullmergeRequestId,
					sourceBranch,
-					customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
+					customDomain: `${protocol}${pullmergeRequestId}${settings.previewSeparator}${getDomain(application.fqdn)}`,
					application: { connect: { id: application.id } }
				}
			})
@@ -188,7 +189,7 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
			}
		}
	}
-} catch ({ status, message }) {
-	return errorHandler({ status, message })
+} catch ({ status, message, type }) {
+	return errorHandler({ status, message, type })
	}
}
@@ -1,5 +1,5 @@
import { FastifyRequest } from "fastify";
-import { errorHandler, getDomain, isDev, prisma, executeDockerCmd, fixType } from "../../../lib/common";
+import { errorHandler, getDomain, isDev, prisma, executeCommand } from "../../../lib/common";
import { getTemplates } from "../../../lib/services";
import { OnlyId } from "../../../types";

@@ -171,8 +171,8 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
	};
	try {
		const { id = null } = request.params;
-		const settings = await prisma.setting.findFirst();
-		if (settings.isTraefikUsed && settings.proxyDefaultRedirect) {
+		const coolifySettings = await prisma.setting.findFirst();
+		if (coolifySettings.isTraefikUsed && coolifySettings.proxyDefaultRedirect) {
			traefik.http.routers['catchall-http'] = {
				entrypoints: ["web"],
				rule: "HostRegexp(`{catchall:.*}`)",
@@ -190,7 +190,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
			traefik.http.middlewares['redirect-regexp'] = {
				redirectregex: {
					regex: '(.*)',
-					replacement: settings.proxyDefaultRedirect,
+					replacement: coolifySettings.proxyDefaultRedirect,
					permanent: false
				}
			}
@@ -263,12 +263,14 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
		const runningContainers = {}
		applications.forEach((app) => dockerIds.add(app.destinationDocker.id));
		for (const dockerId of dockerIds) {
-			const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
+			const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
+			if (container) {
				const containersArray = container.trim().split('\n');
				if (containersArray.length > 0) {
					runningContainers[dockerId] = containersArray
				}
			}
+			}
		for (const application of applications) {
			try {
				const {
@@ -287,11 +289,10 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
				if (
					!runningContainers[destinationDockerId] ||
					runningContainers[destinationDockerId].length === 0 ||
-					!runningContainers[destinationDockerId].includes(id)
+					runningContainers[destinationDockerId].filter((container) => container.startsWith(id)).length === 0
				) {
					continue
				}

				if (buildPack === 'compose') {
					const services = Object.entries(JSON.parse(dockerComposeConfiguration))
					if (services.length > 0) {
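The exact membership test `includes(id)` above missed compose deployments, whose containers are named `<id>-<service>`; the prefix match counts those as running. With illustrative values:

    // Illustrative values only.
    const id = 'clabcd123';
    const running = ['clabcd123-app', 'clabcd123-db', 'otherapp'];
    running.includes(id);                                   // false: a live compose app would be skipped
    running.filter((c) => c.startsWith(id)).length > 0;     // true: prefix match catches its services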
@@ -333,7 +334,8 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
					traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
					traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
					if (previews) {
-						const { stdout } = await executeDockerCmd({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` })
+						const { stdout } = await executeCommand({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` })
+						if (stdout) {
							const containers = stdout
								.trim()
								.split('\n')
@@ -341,7 +343,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
								.map((c) => c.replace(/"/g, ''));
							if (containers.length > 0) {
								for (const container of containers) {
-									const previewDomain = `${container.split('-')[1]}.${domain}`;
+									const previewDomain = `${container.split('-')[1]}${coolifySettings.previewSeparator}${domain}`;
									const nakedDomain = previewDomain.replace(/^www\./, '');
									const pathPrefix = '/'
									const serviceId = `${container}-${port || 'default'}`
@@ -350,6 +352,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
							}
						}
					}
+					}
				} catch (error) {
					console.log(error)
				}
@@ -360,12 +363,14 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
		const runningContainers = {}
		services.forEach((app) => dockerIds.add(app.destinationDocker.id));
		for (const dockerId of dockerIds) {
-			const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
+			const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
+			if (container) {
				const containersArray = container.trim().split('\n');
				if (containersArray.length > 0) {
					runningContainers[dockerId] = containersArray
				}
			}
+			}
		for (const service of services) {
			try {
				let {
@@ -396,8 +401,8 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
				}
				found = JSON.parse(JSON.stringify(found).replaceAll('$$id', id));
				for (const oneService of Object.keys(found.services)) {
-					const isProxyConfiguration = found.services[oneService].proxy;
-					if (isProxyConfiguration) {
+					const isDomainConfiguration = found?.services[oneService]?.proxy?.filter(p => p.domain) ?? [];
+					if (isDomainConfiguration.length > 0) {
						const { proxy } = found.services[oneService];
						for (let configuration of proxy) {
							if (configuration.domain) {
@@ -432,11 +437,14 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
							}
						} else {
							if (found.services[oneService].ports && found.services[oneService].ports.length > 0) {
-								let port = found.services[oneService].ports[0]
+								for (let [index, port] of found.services[oneService].ports.entries()) {
+									if (port == 22) continue;
+									if (index === 0) {
										const foundPortVariable = serviceSetting.find((a) => a.name.toLowerCase() === 'port')
										if (foundPortVariable) {
											port = foundPortVariable.value
										}
+									}
									const domain = getDomain(fqdn);
									const nakedDomain = domain.replace(/^www\./, '');
									const isHttps = fqdn.startsWith('https://');
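Instead of proxying only `ports[0]`, the loop now walks every declared port, skips SSH, and still applies a `port` service setting as an override for the first entry. A condensed sketch with illustrative values:

    // Illustrative values only.
    const ports = [3000, 22, 8080];
    const portOverride: number | null = 8000;  // value of a 'port' service setting, if present
    for (let [index, port] of ports.entries()) {
        if (port == 22) continue;              // never route SSH through the HTTP proxy
        if (index === 0 && portOverride) {
            port = portOverride;               // the override only applies to the primary port
        }
        // ...generate a Traefik router/service pair for `port` here...
    }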
@@ -449,6 +457,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
							}
						}
					}
+					}
				} catch (error) {
					console.log(error)
				}

File diff suppressed because one or more lines are too long
apps/backup/.dockerignore (Normal file, 2 lines)
@@ -0,0 +1,2 @@
node_modules
backup/*
apps/backup/Dockerfile (Normal file, 27 lines)
@@ -0,0 +1,27 @@
ARG PNPM_VERSION=7.17.1

FROM node:18-slim as build
WORKDIR /app
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}

COPY ./package*.json .
RUN pnpm install -p
COPY . .

# Production build
FROM node:18-slim
ARG DOCKER_VERSION=20.10.18
ARG TARGETPLATFORM
ENV NODE_ENV production

WORKDIR /app

RUN apt update && apt -y install curl
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/docker-$DOCKER_VERSION -o /usr/bin/docker
RUN chmod +x /usr/bin/docker
COPY --from=minio/mc:latest /usr/bin/mc /usr/bin/mc
COPY --from=build /app/ .

ENV CHECKPOINT_DISABLE=1
CMD node /app/src/index.mjs
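One caveat worth noting: in a Dockerfile, an ARG declared before the first FROM is only in scope for FROM instructions, so as written `${PNPM_VERSION}` expands to an empty string inside both stages unless the ARG is redeclared per stage. A sketch of the usual fix (not part of the commit):

    ARG PNPM_VERSION=7.17.1

    FROM node:18-slim as build
    # Redeclaring without a value brings the global default back into scope for this stage.
    ARG PNPM_VERSION
    RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}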
apps/backup/backups/.gitkeep (Normal file, 0 lines)

apps/backup/package.json (Normal file, 24 lines)
@@ -0,0 +1,24 @@
{
	"name": "backup",
	"version": "0.0.1",
	"description": "",
	"author": "Andras Bacsai",
	"license": "Apache-2.0",
	"main": "index.mjs",
	"type": "module",
	"scripts": {
		"start": "NODE_ENV=production node src/index.mjs",
		"dev": "pnpm cleanup && NODE_ENV=development node src/index.mjs",
		"build": "docker build -t backup .",
		"test": "pnpm build && docker run -ti --rm -v /var/run/docker.sock:/var/run/docker.sock -v /root/devel/coolify/apps/backup/backups:/app/backups -e CONTAINERS_TO_BACKUP='clatmhc6000008lvb5a5tnvsk:database:mysql:local' backup",
		"cleanup": "rm -rf backups/*"
	},
	"keywords": [],
	"dependencies": {
		"@aws-sdk/client-s3": "^3.222.0",
		"@aws-sdk/lib-storage": "^3.222.0",
		"cuid": "2.1.8",
		"dotenv": "16.0.3",
		"zx": "7.1.1"
	}
}
apps/backup/src/index.mjs (Normal file, 126 lines)
@@ -0,0 +1,126 @@
import * as dotenv from 'dotenv';
dotenv.config()

import 'zx/globals';
import cuid from 'cuid';
import { S3, PutObjectCommand } from "@aws-sdk/client-s3";
import fs from 'fs';

const isDev = process.env.NODE_ENV === 'development'
$.verbose = !!isDev

if (!process.env.CONTAINERS_TO_BACKUP && !isDev) {
	console.log(chalk.red(`No containers to backup!`))
	process.exit(1)
}
const mysqlGzipLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:local';
const mysqlRawLocal = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:raw:local';
const postgresqlGzipLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:gzip:local';
const postgresqlRawLocal = 'clb6c15yi00008lpuezop7cy0:database:postgresql:raw:local';

const minio = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:minio|http|min.arm.coolify.io|backups|<access_key>|<secret_key>';
const digitalOcean = 'clb6c9ue4000a8lputdd5g1cl:database:mysql:gzip:do|https|fra1.digitaloceanspaces.com|backups|<access_key>|<secret_key>';

const devContainers = [mysqlGzipLocal, mysqlRawLocal, postgresqlGzipLocal, postgresqlRawLocal]

const containers = isDev
	? devContainers
	: process.env.CONTAINERS_TO_BACKUP.split(',')

const backup = async (container) => {
	const id = cuid()
	const [name, backupType, type, zipped, storage] = container.split(':')
	const directory = `backups`;
	const filename = zipped === 'raw'
		? `${name}-${type}-${backupType}-${new Date().getTime()}.sql`
		: `${name}-${type}-${backupType}-${new Date().getTime()}.tgz`
	const backup = `${directory}/${filename}`;

	try {
		await $`docker inspect ${name.split(' ')[0]}`.quiet()
		if (backupType === 'database') {
			if (type === 'mysql') {
				console.log(chalk.blue(`Backing up ${name}:${type}...`))
				const { stdout: rootPassword } = await $`docker exec ${name} printenv MYSQL_ROOT_PASSWORD`.quiet()
				if (zipped === 'raw') {
					await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" > ${backup}`
				} else if (zipped === 'gzip') {
					await $`docker exec ${name} sh -c "exec mysqldump --all-databases -uroot -p${rootPassword.trim()}" | gzip > ${backup}`
				}
			}
			if (type === 'postgresql') {
				console.log(chalk.blue(`Backing up ${name}:${type}...`))
				const { stdout: userPassword } = await $`docker exec ${name} printenv POSTGRES_PASSWORD`
				const { stdout: user } = await $`docker exec ${name} printenv POSTGRES_USER`
				if (zipped === 'raw') {
					await $`docker exec ${name} sh -c "exec pg_dumpall -c -U${user.trim()}" -W${userPassword.trim()}> ${backup}`
				} else if (zipped === 'gzip') {
					await $`docker exec ${name} sh -c "exec pg_dumpall -c -U${user.trim()}" -W${userPassword.trim()} | gzip > ${backup}`
				}
			}
			const [storageType, ...storageArgs] = storage.split('|')
			if (storageType !== 'local') {
				let s3Protocol, s3Url, s3Bucket, s3Key, s3Secret = null
				if (storageArgs.length > 0) {
					[s3Protocol, s3Url, s3Bucket, s3Key, s3Secret] = storageArgs
				}
				if (storageType === 'minio') {
					if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
						console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
						return
					}
					await $`mc alias set ${id} ${s3Protocol}://${s3Url} ${s3Key} ${s3Secret}`
					await $`mc stat ${id}`
					await $`mc cp ${backup} ${id}/${s3Bucket}`
					await $`rm ${backup}`
					await $`mc alias rm ${id}`
				} else if (storageType === 'do') {
					if (!s3Protocol || !s3Url || !s3Bucket || !s3Key || !s3Secret) {
						console.log(chalk.red(`Invalid storage arguments for ${name}:${type}!`))
						return
					}
					console.log({ s3Protocol, s3Url, s3Bucket, s3Key, s3Secret })
					console.log(chalk.blue(`Uploading ${name}:${type} to DigitalOcean Spaces...`))
					const readstream = fs.createReadStream(backup)
					const bucketParams = {
						Bucket: s3Bucket,
						Key: filename,
						Body: readstream
					};
					const s3Client = new S3({
						forcePathStyle: false,
						endpoint: `${s3Protocol}://${s3Url}`,
						region: "us-east-1",
						credentials: {
							accessKeyId: s3Key,
							secretAccessKey: s3Secret
						},
					});
					try {
						const data = await s3Client.send(new PutObjectCommand(bucketParams));
						console.log(chalk.green("Successfully uploaded backup: " +
							bucketParams.Bucket +
							"/" +
							bucketParams.Key
						)
						);
						return data;
					} catch (err) {
						console.log("Error", err);
					}
				}
			}
		}

		console.log(chalk.green(`Backup of ${name}:${type} complete!`))
	} catch (error) {
		console.log(chalk.red(`Backup of ${name}:${type} failed!`))
		console.log(chalk.red(error))
	}
}
const promises = []
for (const container of containers) {
	// await backup(container);
	promises.push(backup(container))
}
await Promise.all(promises)
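As the destructuring in `backup()` shows, each entry of CONTAINERS_TO_BACKUP is a colon-separated spec of the form <container>:<backupType>:<engine>:<compression>:<storage>, where the storage field may carry pipe-separated S3 parameters (see the `minio` and `digitalOcean` constants above). A hypothetical invocation; the container id and credentials are placeholders:

    CONTAINERS_TO_BACKUP='clxyz0000000000000000000:database:postgresql:gzip:minio|https|minio.example.com|backups|ACCESS_KEY|SECRET_KEY' \
      node src/index.mjs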
apps/client/.eslintignore (Normal file, 13 lines)
@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock
apps/client/.eslintrc.cjs (Normal file, 20 lines)
@@ -0,0 +1,20 @@
module.exports = {
	root: true,
	parser: '@typescript-eslint/parser',
	extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'],
	plugins: ['svelte3', '@typescript-eslint'],
	ignorePatterns: ['*.cjs'],
	overrides: [{ files: ['*.svelte'], processor: 'svelte3/svelte3' }],
	settings: {
		'svelte3/typescript': () => require('typescript')
	},
	parserOptions: {
		sourceType: 'module',
		ecmaVersion: 2020
	},
	env: {
		browser: true,
		es2017: true,
		node: true
	}
};
apps/client/.gitignore (vendored, 10 lines)
@@ -0,0 +1,10 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*
apps/client/.npmrc (Normal file, 1 line)
@@ -0,0 +1 @@
engine-strict=true
apps/client/.prettierignore (Normal file, 13 lines)
@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock
apps/client/.prettierrc (Normal file, 9 lines)
@@ -0,0 +1,9 @@
{
	"useTabs": true,
	"singleQuote": true,
	"trailingComma": "none",
	"printWidth": 100,
	"plugins": ["prettier-plugin-svelte"],
	"pluginSearchDirs": ["."],
	"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
}
apps/client/README.md (Normal file, 1 line)
@@ -0,0 +1 @@
# SvelteKit Static site
apps/client/package.json (Normal file, 50 lines)
@@ -0,0 +1,50 @@
{
	"name": "client",
	"description": "Coolify's SvelteKit UI",
	"license": "Apache-2.0",
	"private": true,
	"scripts": {
		"dev": "vite dev",
		"build": "vite build && cp -Pr build/ ../../build/public",
		"preview": "vite preview",
		"test": "playwright test",
		"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
		"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
		"lint": "prettier --plugin-search-dir . --check . && eslint .",
		"format": "prettier --plugin-search-dir . --write ."
	},
	"devDependencies": {
		"@playwright/test": "1.28.1",
		"@sveltejs/adapter-static": "1.0.0-next.48",
		"@sveltejs/kit": "1.0.0-next.572",
		"@types/js-cookie": "3.0.2",
		"@typescript-eslint/eslint-plugin": "5.44.0",
		"@typescript-eslint/parser": "5.44.0",
		"autoprefixer": "10.4.13",
		"eslint": "8.28.0",
		"eslint-config-prettier": "8.5.0",
		"eslint-plugin-svelte3": "4.0.0",
		"postcss": "8.4.19",
		"postcss-load-config": "4.0.1",
		"prettier": "2.8.0",
		"prettier-plugin-svelte": "2.8.1",
		"svelte": "3.53.1",
		"svelte-check": "2.9.2",
		"svelte-preprocess": "^4.10.7",
		"tailwindcss": "3.2.4",
		"tslib": "2.4.1",
		"typescript": "4.9.3",
		"vite": "3.2.4"
	},
	"type": "module",
	"dependencies": {
		"@trpc/client": "10.1.0",
		"@trpc/server": "10.1.0",
		"cuid": "2.1.8",
		"daisyui": "2.41.0",
		"flowbite-svelte": "0.28.0",
		"js-cookie": "3.0.1",
		"server": "workspace:*",
		"superjson": "1.11.0"
	}
}
apps/client/playwright.config.ts (Normal file, 10 lines)
@@ -0,0 +1,10 @@
import type { PlaywrightTestConfig } from '@playwright/test';

const config: PlaywrightTestConfig = {
	webServer: {
		command: 'npm run build && npm run preview',
		port: 4173
	}
};

export default config;
apps/client/pnpm-lock.yaml (generated, 1793 lines)
File diff suppressed because it is too large
apps/client/postcss.config.cjs (Normal file, 13 lines)
@@ -0,0 +1,13 @@
const tailwindcss = require('tailwindcss');
const autoprefixer = require('autoprefixer');

const config = {
	plugins: [
		// Some plugins, like tailwindcss/nesting, need to run before Tailwind,
		tailwindcss(),
		// but others, like autoprefixer, need to run after.
		autoprefixer
	]
};

module.exports = config;
apps/client/src/app.d.ts (vendored, 9 lines)
@@ -0,0 +1,9 @@
// See https://kit.svelte.dev/docs/types#app
// for information about these interfaces
// and what to do when importing types
declare namespace App {
	// interface Locals {}
	// interface PageData {}
	// interface Error {}
	// interface Platform {}
}
apps/client/src/app.html (Normal file, 12 lines)
@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
	<head>
		<meta charset="utf-8" />
		<link rel="icon" href="%sveltekit.assets%/favicon.png" />
		<meta name="viewport" content="width=device-width" />
		%sveltekit.head%
	</head>
	<body>
		<div class="h-screen">%sveltekit.body%</div>
	</body>
</html>
apps/client/src/app.postcss (Normal file, 284 lines)
@@ -0,0 +1,284 @@
/* Write your global styles here, in PostCSS syntax */
@tailwind base;
@tailwind components;
@tailwind utilities;

@font-face {
	font-family: 'Poppins';
	font-style: normal;
	font-weight: 400;
	src: local(''), url('/poppins-v19-latin-ext_latin_devanagari-regular.woff2') format('woff2'),
		url('/poppins-v19-latin-ext_latin_devanagari-regular.woff') format('woff');
}
@font-face {
	font-family: 'Poppins';
	font-style: normal;
	font-weight: 500;
	src: local(''), url('/poppins-v19-latin-ext_latin_devanagari-500.woff2') format('woff2'),
		url('/poppins-v19-latin-ext_latin_devanagari-500.woff') format('woff');
}

button {
	@apply text-sm !important;
}
html {
	@apply h-full min-h-full overflow-y-scroll;
}
body {
	@apply min-h-screen overflow-x-hidden bg-coolblack text-sm text-white scrollbar-w-1 scrollbar-thumb-coollabs scrollbar-track-coolgray-200;
}

input,
.input {
	@apply h-12 w-96 rounded border border-transparent bg-coolgray-200 p-2 text-xs tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:border disabled:border-dashed disabled:border-coolgray-200 disabled:bg-transparent disabled:bg-coolblack md:text-sm;
}
textarea {
	@apply min-w-[14rem] rounded border border-transparent bg-coolgray-200 p-2 text-xs tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:border disabled:border-dashed disabled:border-coolgray-200 disabled:bg-transparent md:text-sm;
}

#svelte .custom-select-wrapper .selectContainer.disabled input {
	@apply placeholder:text-stone-600;
}

#svelte .custom-select-wrapper .selectContainer input {
	@apply text-white;
}

#svelte .custom-select-wrapper .selectContainer {
	@apply h-12 rounded bg-coolgray-200 p-2 px-0 text-xs tracking-tight outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 md:text-sm;
}

#svelte .listContainer {
	@apply bg-coolgray-400 text-white scrollbar-w-2 scrollbar-thumb-green-500 scrollbar-track-coolgray-200;
}
#svelte .selectedItem {
	@apply pl-2;
}

#svelte .item.hover {
	@apply bg-coollabs text-white !important;
}
#svelte .item.active {
	@apply bg-coolgray-100 text-white;
}

select {
	@apply h-12 w-96 rounded bg-coolgray-200 p-2 text-xs font-bold tracking-tight text-white placeholder-stone-600 outline-none transition duration-150 hover:bg-coolgray-500 focus:bg-coolgray-500 disabled:text-stone-600 md:text-sm;
}
.custom-select-wrapper {
	--background: rgb(32 32 32);
	--inputColor: white;
	--multiItemPadding: 0;
	--multiSelectPadding: 0 0.5rem 0 0.5rem;
	--border: none;
	--placeholderColor: rgb(87 83 78);
	--listBackground: rgb(32 32 32);
	--itemColor: white;
	--itemHoverBG: rgb(107 22 237);
	--multiItemBG: rgb(32 32 32);
	--multiClearHoverBG: transparent;
	--multiClearHoverFill: rgb(239 68 68);
	--multiItemActiveBG: transparent;
	--multiClearBG: transparent;
	--clearSelectFocusColor: white;
	--clearSelectHoverColor: rgb(239 68 68);
	--multiItemBorderRadius: 0.25rem;
	--listShadow: none;
}

label {
	@apply inline-block;
}
.btn {
	@apply text-white text-base min-w-fit no-animation;
}

a {
	@apply underline hover:text-white;
}

.content {
	@apply p-2 px-4;
}

.title {
	@apply text-lg lg:text-2xl font-bold;
}
.subtitle {
	@apply text-lg lg:text-xl font-bold text-indigo-300;
}
.label {
	@apply text-sm leading-6 font-semibold text-sky-500 dark:text-sky-400;
}
.card {
	@apply border bg-coolgray-100 border-coolgray-200 rounded p-2 space-y-2 sticky top-4 mb-2 items-center;
}
.icon-holder {
	overflow: hidden;
	height: 30px;
	border-radius: 5px;
	margin-right: 8px;
	background: linear-gradient(0deg, #999, #ddd);
}
.instance-status-running {
	box-shadow: 1px 4px 5px #3df721;
}
.instance-status-stopped {
	box-shadow: 1px 4px 5px rgb(110, 191, 225);
}
.instance-status-error {
	box-shadow: 1px 4px 5px #fb00ff;
}
.instance-status-degraded {
	box-shadow: 1px 4px 5px #f7b121;
}
.badge-status-healthy,
.badge-status-running {
	@apply text-green-500;
}
.badge-status-degraded {
	@apply text-green-500;
}
.badge-status-stopped {
	@apply text-sky-500;
}
.delete-button {
	@apply bg-red-600;
}
.delete-button:hover {
	@apply bg-red-500;
}
/* Interchange menu position */
.menu-left {
	display: flex;
	flex-direction: row;
}
.menu-left .menu-bar {
	display: flex;
	flex-direction: column;
}
.menu-left .menu-bar > * {
	display: flex;
	flex-direction: column;
}
.menu-top {
	display: flex;
	flex-direction: column;
}
.menu-top .menu-bar {
	display: flex;
	flex-direction: row;
}
.menu-top .menu-bar > * {
	display: flex;
	flex-direction: row;
}

.nav-main {
	@apply fixed top-0 left-0 min-h-screen w-16 min-w-[4rem] overflow-hidden border-r border-stone-800 bg-coolgray-200 scrollbar-w-1 scrollbar-thumb-coollabs scrollbar-track-coolgray-200 xl:overflow-visible;
}

.nav-side {
	@apply absolute right-0 top-0 z-50 m-5 flex flex-wrap items-center justify-end space-x-2 bg-coolblack/40 text-white;
}

.add-icon {
	@apply rounded p-1 transition duration-200;
}

.icons {
	@apply rounded p-2 transition duration-200 hover:bg-coolgray-500 disabled:bg-coolblack disabled:text-coolgray-500 !important;
}

.arrow-right-applications {
	@apply -ml-6 px-2 font-bold text-green-500;
}

.border-gradient {
	border-bottom: 2px solid transparent;
	-o-border-image: linear-gradient(
		0.25turn,
		rgba(255, 249, 34),
		rgba(255, 0, 128),
		rgba(56, 2, 155, 0)
	);
	border-image: linear-gradient(
		0.25turn,
		rgba(255, 249, 34),
		rgba(255, 0, 128),
		rgba(56, 2, 155, 0)
	);
	border-image-slice: 1;
}
.border-gradient-full {
	border: 4px solid transparent;
	-o-border-image: linear-gradient(
		0.25turn,
		rgba(255, 249, 34),
		rgba(255, 0, 128),
		rgba(56, 2, 155, 0)
	);
	border-image: linear-gradient(
		0.25turn,
		rgba(255, 249, 34),
		rgba(255, 0, 128),
		rgba(56, 2, 155, 0)
	);
	border-image-slice: 1;
}

.box-selection {
	@apply min-w-[16rem] justify-center rounded border-transparent bg-coolgray-200 p-6 hover:border-transparent hover:bg-coolgray-400;
}

.lds-heart {
	animation: lds-heart 1.2s infinite cubic-bezier(0.215, 0.61, 0.355, 1);
}
@keyframes lds-heart {
	0% {
		transform: scale(1);
	}
	5% {
		transform: scale(1.2);
	}
	39% {
		transform: scale(0.85);
	}
	45% {
		transform: scale(1);
	}
	60% {
		transform: scale(0.95);
	}
	100% {
		transform: scale(0.9);
	}
}

.sub-menu {
	@apply w-48 text-base font-bold hover:bg-coolgray-500 rounded p-2 hover:text-white text-stone-200 cursor-pointer;
}

.sub-menu-active {
	@apply bg-coolgray-500 text-white;
}

.table tbody td,
.table tbody th,
.table thead th {
	background-color: transparent;
}
.table * {
	border: none;
}

.header {
	@apply flex flex-row z-10 w-full py-5 px-5;
}
.burger {
	@apply block m-[2px] h-[3px] w-5 rounded;
}

.bg-coollabs-gradient {
	@apply bg-gradient-to-r from-purple-500 via-pink-500 to-red-500;
}
apps/client/src/lib/common.ts
Normal file
20
apps/client/src/lib/common.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { addToast } from './store';
|
||||||
|
|
||||||
|
export const asyncSleep = (delay: number) => new Promise((resolve) => setTimeout(resolve, delay));
|
||||||
|
|
||||||
|
export function errorNotification(error: any | { message: string }): void {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
addToast({
|
||||||
|
message: error.message,
|
||||||
|
type: 'error'
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
addToast({
|
||||||
|
message: error,
|
||||||
|
type: 'error'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
export function getRndInteger(min: number, max: number) {
|
||||||
|
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||||
|
}
|
||||||
apps/client/src/lib/components/Toast.svelte (Normal file, 64 lines)
@@ -0,0 +1,64 @@
<script>
	import { createEventDispatcher } from 'svelte';
	const dispatch = createEventDispatcher();
	export let type = 'info';
	function success() {
		if (type === 'success') {
			return 'bg-dark lg:bg-primary';
		}
	}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
	on:click={() => dispatch('click')}
	on:mouseover={() => dispatch('pause')}
	on:focus={() => dispatch('pause')}
	on:mouseout={() => dispatch('resume')}
	on:blur={() => dispatch('resume')}
	class={` flex flex-row justify-center alert shadow-lg text-white hover:scale-105 transition-all duration-100 cursor-pointer rounded ${success()}`}
	class:alert-error={type === 'error'}
	class:alert-info={type === 'info'}
>
	{#if type === 'success'}
		<svg
			xmlns="http://www.w3.org/2000/svg"
			class="stroke-current flex-shrink-0 h-6 w-6"
			fill="none"
			viewBox="0 0 24 24"
			><path
				stroke-linecap="round"
				stroke-linejoin="round"
				stroke-width="2"
				d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"
			/></svg
		>
	{:else if type === 'error'}
		<svg
			xmlns="http://www.w3.org/2000/svg"
			class="stroke-current flex-shrink-0 h-6 w-6"
			fill="none"
			viewBox="0 0 24 24"
			><path
				stroke-linecap="round"
				stroke-linejoin="round"
				stroke-width="2"
				d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z"
			/></svg
		>
	{:else if type === 'info'}
		<svg
			xmlns="http://www.w3.org/2000/svg"
			fill="none"
			viewBox="0 0 24 24"
			class="stroke-current flex-shrink-0 w-6 h-6"
			><path
				stroke-linecap="round"
				stroke-linejoin="round"
				stroke-width="2"
				d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"
			/></svg
		>
	{/if}
	<slot />
</div>
apps/client/src/lib/components/Toasts.svelte (Normal file, 25 lines)
@@ -0,0 +1,25 @@
<script lang="ts">
	import Toast from './Toast.svelte';
	import { dismissToast, pauseToast, resumeToast, toasts } from '$lib/store';
</script>

{#if $toasts.length > 0}
	<section>
		<article class="toast toast-top toast-center rounded-none w-2/3 lg:w-[20rem]" role="alert">
			{#each $toasts as toast (toast.id)}
				<Toast
					type={toast.type}
					on:resume={() => resumeToast(toast.id)}
					on:pause={() => pauseToast(toast.id)}
					on:click={() => dismissToast(toast.id)}>{@html toast.message}</Toast
				>
			{/each}
		</article>
	</section>
{/if}

<style lang="postcss">
	section {
		@apply fixed top-0 left-0 right-0 w-full flex flex-col mt-4 justify-center z-[1000];
	}
</style>
apps/client/src/lib/components/Tooltip.svelte (Normal file, 10 lines)
@@ -0,0 +1,10 @@
<script lang="ts">
	import { Tooltip } from 'flowbite-svelte';
	export let placement = 'bottom';
	export let color = 'bg-coollabs';
	export let triggeredBy = '#tooltip-default';
</script>

<Tooltip {triggeredBy} {placement} arrow={false} defaultClass={color + ' font-thin text-xs text-left border-none p-2'} style="custom"
	><slot /></Tooltip
>
apps/client/src/lib/components/UpdateAvailable.svelte (Normal file, 206 lines)
@@ -0,0 +1,206 @@
<script lang="ts">
	import { dev } from '$app/environment';
	import {
		addToast,
		appSession,
		features,
		updateLoading,
		isUpdateAvailable,
		latestVersion
	} from '$lib/store';
	import { asyncSleep, errorNotification } from '$lib/common';
	import { onMount } from 'svelte';
	import Tooltip from './Tooltip.svelte';

	let updateStatus: any = {
		found: false,
		loading: false,
		success: null
	};
	async function update() {
		updateStatus.loading = true;
		try {
			if (dev) {
				localStorage.setItem('lastVersion', $appSession.version);
				await asyncSleep(1000);
				updateStatus.loading = false;
				return window.location.reload();
			} else {
				localStorage.setItem('lastVersion', $appSession.version);
				// await post(`/update`, { type: 'update', latestVersion: $latestVersion });
				addToast({
					message: 'Update completed.<br><br>Waiting for the new version to start...',
					type: 'success'
				});

				let reachable = false;
				let tries = 0;
				do {
					await asyncSleep(4000);
					try {
						// await get(`/undead`);
						reachable = true;
					} catch (error) {
						reachable = false;
					}
					if (reachable) break;
					tries++;
				} while (!reachable || tries < 120);
				addToast({
					message: 'New version reachable. Reloading...',
					type: 'success'
				});
				updateStatus.loading = false;
				updateStatus.success = true;
				await asyncSleep(3000);
				return window.location.reload();
			}
		} catch (error) {
			updateStatus.success = false;
			updateStatus.loading = false;
			return errorNotification(error);
		}
	}
	onMount(async () => {
		if ($appSession.userId) {
			const overrideVersion = $features.latestVersion;
			if ($appSession.teamId === '0') {
				if ($updateLoading === true) return;
				try {
					$updateLoading = true;
					// const data = await get(`/update`);
					if (overrideVersion || data?.isUpdateAvailable) {
						$latestVersion = overrideVersion || data.latestVersion;
						if (overrideVersion) {
							$isUpdateAvailable = true;
						} else {
							$isUpdateAvailable = data.isUpdateAvailable;
						}
					}
				} catch (error) {
					return errorNotification(error);
				} finally {
					$updateLoading = false;
				}
			}
		}
	});
</script>

<div class="py-0 lg:py-2">
	{#if $appSession.teamId === '0'}
		{#if $isUpdateAvailable}
			<button
				id="update"
				disabled={updateStatus.success === false}
				on:click={update}
				class="icons bg-coollabs-gradient text-white duration-75 hover:scale-105 w-full"
			>
				{#if updateStatus.loading}
					<svg
						xmlns="http://www.w3.org/2000/svg"
						class="lds-heart h-8 w-8 mx-auto"
						viewBox="0 0 24 24"
						stroke-width="1.5"
						stroke="currentColor"
						fill="none"
						stroke-linecap="round"
						stroke-linejoin="round"
					>
						<path stroke="none" d="M0 0h24v24H0z" fill="none" />
						<path
							d="M19.5 13.572l-7.5 7.428l-7.5 -7.428m0 0a5 5 0 1 1 7.5 -6.566a5 5 0 1 1 7.5 6.572"
						/>
					</svg>
				{:else if updateStatus.success === null}
					<div class="flex items-center justify-center space-x-2">
						<svg
							xmlns="http://www.w3.org/2000/svg"
							class="h-8 w-8"
							viewBox="0 0 24 24"
							stroke-width="1.5"
							stroke="currentColor"
							fill="none"
							stroke-linecap="round"
							stroke-linejoin="round"
						>
							<path stroke="none" d="M0 0h24v24H0z" fill="none" />
							<circle cx="12" cy="12" r="9" />
							<line x1="12" y1="8" x2="8" y2="12" />
							<line x1="12" y1="8" x2="12" y2="16" />
							<line x1="16" y1="12" x2="12" y2="8" />
						</svg>
						<span class="flex lg:hidden">Update available</span>
					</div>
				{:else if updateStatus.success}
					<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-8 w-8"
						><path
							fill="#DD2E44"
							d="M11.626 7.488c-.112.112-.197.247-.268.395l-.008-.008L.134 33.141l.011.011c-.208.403.14 1.223.853 1.937.713.713 1.533 1.061 1.936.853l.01.01L28.21 24.735l-.008-.009c.147-.07.282-.155.395-.269 1.562-1.562-.971-6.627-5.656-11.313-4.687-4.686-9.752-7.218-11.315-5.656z"
						/><path
							fill="#EA596E"
							d="M13 12L.416 32.506l-.282.635.011.011c-.208.403.14 1.223.853 1.937.232.232.473.408.709.557L17 17l-4-5z"
						/><path
							fill="#A0041E"
							d="M23.012 13.066c4.67 4.672 7.263 9.652 5.789 11.124-1.473 1.474-6.453-1.118-11.126-5.788-4.671-4.672-7.263-9.654-5.79-11.127 1.474-1.473 6.454 1.119 11.127 5.791z"
						/><path
							fill="#AA8DD8"
							d="M18.59 13.609c-.199.161-.459.245-.734.215-.868-.094-1.598-.396-2.109-.873-.541-.505-.808-1.183-.735-1.862.128-1.192 1.324-2.286 3.363-2.066.793.085 1.147-.17 1.159-.292.014-.121-.277-.446-1.07-.532-.868-.094-1.598-.396-2.11-.873-.541-.505-.809-1.183-.735-1.862.13-1.192 1.325-2.286 3.362-2.065.578.062.883-.057 1.012-.134.103-.063.144-.123.148-.158.012-.121-.275-.446-1.07-.532-.549-.06-.947-.552-.886-1.102.059-.549.55-.946 1.101-.886 2.037.219 2.973 1.542 2.844 2.735-.13 1.194-1.325 2.286-3.364 2.067-.578-.063-.88.057-1.01.134-.103.062-.145.123-.149.157-.013.122.276.446 1.071.532 2.037.22 2.973 1.542 2.844 2.735-.129 1.192-1.324 2.286-3.362 2.065-.578-.062-.882.058-1.012.134-.104.064-.144.124-.148.158-.013.121.276.446 1.07.532.548.06.947.553.886 1.102-.028.274-.167.511-.366.671z"
						/><path
							fill="#77B255"
							d="M30.661 22.857c1.973-.557 3.334.323 3.658 1.478.324 1.154-.378 2.615-2.35 3.17-.77.216-1.001.584-.97.701.034.118.425.312 1.193.095 1.972-.555 3.333.325 3.657 1.479.326 1.155-.378 2.614-2.351 3.17-.769.216-1.001.585-.967.702.033.117.423.311 1.192.095.53-.149 1.084.16 1.233.691.148.532-.161 1.084-.693 1.234-1.971.555-3.333-.323-3.659-1.479-.324-1.154.379-2.613 2.353-3.169.77-.217 1.001-.584.967-.702-.032-.117-.422-.312-1.19-.096-1.974.556-3.334-.322-3.659-1.479-.325-1.154.378-2.613 2.351-3.17.768-.215.999-.585.967-.701-.034-.118-.423-.312-1.192-.096-.532.15-1.083-.16-1.233-.691-.149-.53.161-1.082.693-1.232z"
						/><path
							fill="#AA8DD8"
							d="M23.001 20.16c-.294 0-.584-.129-.782-.375-.345-.432-.274-1.061.156-1.406.218-.175 5.418-4.259 12.767-3.208.547.078.927.584.849 1.131-.078.546-.58.93-1.132.848-6.493-.922-11.187 2.754-11.233 2.791-.186.148-.406.219-.625.219z"
						/><path
							fill="#77B255"
							d="M5.754 16c-.095 0-.192-.014-.288-.042-.529-.159-.829-.716-.67-1.245 1.133-3.773 2.16-9.794.898-11.364-.141-.178-.354-.353-.842-.316-.938.072-.849 2.051-.848 2.071.042.551-.372 1.031-.922 1.072-.559.034-1.031-.372-1.072-.923-.103-1.379.326-4.035 2.692-4.214 1.056-.08 1.933.287 2.552 1.057 2.371 2.951-.036 11.506-.542 13.192-.13.433-.528.712-.958.712z"
						/><circle fill="#5C913B" cx="25.5" cy="9.5" r="1.5" /><circle
							fill="#9266CC"
							cx="2"
							cy="18"
							r="2"
						/><circle fill="#5C913B" cx="32.5" cy="19.5" r="1.5" /><circle
							fill="#5C913B"
							cx="23.5"
							cy="31.5"
							r="1.5"
						/><circle fill="#FFCC4D" cx="28" cy="4" r="2" /><circle
							fill="#FFCC4D"
							cx="32.5"
							cy="8.5"
							r="1.5"
						/><circle fill="#FFCC4D" cx="29.5" cy="12.5" r="1.5" /><circle
							fill="#FFCC4D"
							cx="7.5"
							cy="23.5"
							r="1.5"
						/></svg
					>
				{:else}
					<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-9 w-8"
						><path
							fill="#FFCC4D"
							d="M36 18c0 9.941-8.059 18-18 18S0 27.941 0 18 8.059 0 18 0s18 8.059 18 18"
						/><path
							fill="#664500"
							d="M22 27c0 2.763-1.791 3-4 3-2.21 0-4-.237-4-3 0-2.761 1.79-6 4-6 2.209 0 4 3.239 4 6zm8-12c-.124 0-.25-.023-.371-.072-5.229-2.091-7.372-5.241-7.461-5.374-.307-.46-.183-1.081.277-1.387.459-.306 1.077-.184 1.385.274.019.027 1.93 2.785 6.541 4.629.513.206.763.787.558 1.3-.157.392-.533.63-.929.63zM6 15c-.397 0-.772-.238-.929-.629-.205-.513.044-1.095.557-1.3 4.612-1.844 6.523-4.602 6.542-4.629.308-.456.929-.577 1.387-.27.457.308.581.925.275 1.383-.089.133-2.232 3.283-7.46 5.374C6.25 14.977 6.124 15 6 15z"
						/><path fill="#5DADEC" d="M24 16h4v19l-4-.046V16zM8 35l4-.046V16H8v19z" /><path
							fill="#664500"
							d="M14.999 18c-.15 0-.303-.034-.446-.105-3.512-1.756-7.07-.018-7.105 0-.495.249-1.095.046-1.342-.447-.247-.494-.047-1.095.447-1.342.182-.09 4.498-2.197 8.895 0 .494.247.694.848.447 1.342-.176.35-.529.552-.896.552zm14 0c-.15 0-.303-.034-.446-.105-3.513-1.756-7.07-.018-7.105 0-.494.248-1.094.047-1.342-.447-.247-.494-.047-1.095.447-1.342.182-.09 4.501-2.196 8.895 0 .494.247.694.848.447 1.342-.176.35-.529.552-.896.552z"
						/><ellipse fill="#5DADEC" cx="18" cy="34" rx="18" ry="2" /><ellipse
							fill="#E75A70"
							cx="18"
							cy="27"
							rx="3"
							ry="2"
						/></svg
					>
				{/if}
			</button>
			<Tooltip triggeredBy="#update" placement="right" color="bg-coolgray-200 text-white"
				>New Version Available!</Tooltip
			>
		{/if}
	{/if}
</div>
apps/client/src/lib/components/icons/Delete.svelte (Normal file, 17 lines)
@@ -0,0 +1,17 @@
<svg
	xmlns="http://www.w3.org/2000/svg"
	class="h-6 w-6"
	viewBox="0 0 24 24"
	stroke-width="1.5"
	stroke="currentColor"
	fill="none"
	stroke-linecap="round"
	stroke-linejoin="round"
>
	<path stroke="none" d="M0 0h24v24H0z" fill="none" />
	<line x1="4" y1="7" x2="20" y2="7" />
	<line x1="10" y1="11" x2="10" y2="17" />
	<line x1="14" y1="11" x2="14" y2="17" />
	<path d="M5 7l1 12a2 2 0 0 0 2 2h8a2 2 0 0 0 2 -2l1 -12" />
	<path d="M9 7v-3a1 1 0 0 1 1 -1h4a1 1 0 0 1 1 1v3" />
</svg>
apps/client/src/lib/components/icons/RemoteLink.svelte (Normal file, 10 lines)
@@ -0,0 +1,10 @@
<svg
	xmlns="http://www.w3.org/2000/svg"
	fill="currentColor"
	viewBox="0 0 24 24"
	stroke-width="3"
	stroke="currentColor"
	class="w-3 h-3 text-white"
>
	<path stroke-linecap="round" stroke-linejoin="round" d="M4.5 19.5l15-15m0 0H8.25m11.25 0v11.25" />
</svg>
@@ -0,0 +1,47 @@
<script lang="ts">
	import * as Icons from '$lib/components/icons/applications';
	export let application: any;
	export let isAbsolute = true;
</script>

{#if application.buildPack?.toLowerCase() === 'rust'}
	<Icons.Rust {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'node'}
	<Icons.Nodejs {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'react'}
	<Icons.React {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'svelte'}
	<Icons.Svelte {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'vuejs'}
	<Icons.Vuejs {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'php'}
	<Icons.Php {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'python'}
	<Icons.Python {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'static'}
	<Icons.Static {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'nestjs'}
	<Icons.Nestjs {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'nuxtjs'}
	<Icons.Nuxtjs {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'nextjs'}
	<Icons.Nextjs {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'gatsby'}
	<Icons.Gatsby {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'docker'}
	<Icons.Docker {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'astro'}
	<Icons.Astro {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'eleventy'}
	<Icons.Eleventy {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'deno'}
	<Icons.Deno {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'laravel'}
	<Icons.Laravel {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'heroku'}
	<Icons.Heroku {isAbsolute} />
{:else if application.buildPack?.toLowerCase() === 'compose'}
	<Icons.Compose {isAbsolute} />
{:else if application.simpleDockerfile}
	<Icons.Docker {isAbsolute} />
{/if}
@@ -0,0 +1,25 @@
<script lang="ts">
	export let isAbsolute = true;
</script>

<svg
	class={isAbsolute ? 'absolute top-0 left-0 -m-6 h-14 w-14' : 'mx-auto w-8 h-8'}
	viewBox="0 0 256 256"
	fill="none"
	xmlns="http://www.w3.org/2000/svg"
>
	<path
		id="a"
		fill="#302649"
		fill-rule="evenodd"
		clip-rule="evenodd"
		d="M163.008 18.929c1.944 2.413 2.935 5.67 4.917 12.181l43.309 142.27a180.277 180.277 0 00-51.778-17.53l-28.198-95.29a3.67 3.67 0 00-7.042.01l-27.857 95.232a180.225 180.225 0 00-52.01 17.557l43.52-142.281c1.99-6.502 2.983-9.752 4.927-12.16a15.999 15.999 0 016.484-4.798c2.872-1.154 6.271-1.154 13.07-1.154h31.085c6.807 0 10.211 0 13.086 1.157a16.004 16.004 0 016.487 4.806z"
	/>
	<path
		id="flame"
		fill="#EF661E"
		fill-rule="evenodd"
		clip-rule="evenodd"
		d="M168.19 180.151c-7.139 6.105-21.39 10.268-37.804 10.268-20.147 0-37.033-6.272-41.513-14.707-1.602 4.835-1.961 10.367-1.961 13.902 0 0-1.056 17.355 11.015 29.426 0-6.268 5.081-11.349 11.349-11.349 10.743 0 10.731 9.373 10.721 16.977v.679c0 11.542 7.054 21.436 17.086 25.606a23.27 23.27 0 01-2.339-10.2c0-11.008 6.463-15.107 13.974-19.87 5.976-3.79 12.616-8.001 17.192-16.449a31.024 31.024 0 003.743-14.82c0-3.299-.513-6.479-1.463-9.463z"
	/>
</svg>
@@ -0,0 +1,9 @@
<script lang="ts">
	export let isAbsolute = false;
</script>

<img
	alt="docker compose logo"
	class={isAbsolute ? 'w-16 h-16 absolute top-0 left-0 -m-8' : 'w-8 h-8 mx-auto'}
	src="/icons/compose.png"
/>
Some files were not shown because too many files have changed in this diff