Mirror of https://github.com/ershisan99/coolify.git (synced 2025-12-24 12:33:17 +00:00)

Compare commits: 375 commits
Commit SHAs:

ac5cc8b299 c588ab723b 4b2dfc051d 5238c83f3f 90bb580e50 f40e142704 a67618675d 4fe436e4d1
683b8c966f 28377a156d 3dcc4faabb 60a033f93a 436bd73786 5c69ff3339 2105b1e7c4 523004e5b2
5e02c386ec b4501fe52d 3c29eaa1b1 ee67e163b1 9662bc29fb 96f2660b98 20f594c66c 2b8d59dca3
d44047d109 57c4d33bd3 7a5377efe0 91e7cffccc df31e47313 cb9586270c 21dfa5227c 9d15d2be77
929c02d31f 846185dd42 7bc2299a8e d40e131bd8 552c7297bf 3f5fd23955 8b8566251e 6db47def8e
1d0edc7b25 f9a417638a 984fe01551 d0cb350687 5f51011ce1 9ca125ac55 360f4f8c27 6501f71bd6
bf6b799dba 5f57279283 5ed3565520 513fa90b8a a4d9b9689b 1c05c0dcbb a1b49a3a6b 6f57298cbb
d8ce673088 4cd7af7a74 49c61b5992 e44d0550d2 17f82109b6 2d8888ae9b 4abe9c6fb2 f9d94fa660
eaa13f4990 01fd5901fe 3d6adeffc4 9066952759 6dd7f6274a 7a8fe6d152 be507be3a9 657b97f190
9d7745cd9b 3668f83693 a2d5d99c1f f379ef6a3b 510a748749 550150d685 011ea9659e 6eca7d948e
90e639f119 86ac6461d1 18a95bf9ab 7949bbe66d 4b603c452a 837f0634b6 78076f7854 719350cee1
4f6be3e6f5 8e61e9fecb 2083285d78 034e86e2cb f4a2d5c652 534ccd6bf6 c17064f853 1e1566082f
449548654d 6fc99524f0 051629fad3 f957008c1c 98e1deec88 99127652af e9b9e9e82c 2ed5c3746e
8902056fdb defa6ff6e8 eed44e81be 1951aec5ec 9c4e0b4107 c8deac660d 4cc5ec9bd0 c41bef2e81
5b735cf960 604e960aa9 6c465aa1f2 c266832fdc 906d8d0413 cb05fd4a3c 2eda24799b 41e221f0cb
f75af035bb e9e6449edf f09d76da35 40dfe0919b 85990dd074 38acc16e1c b7cc4c1e92 1f232d96d8
83508f165d 7cc58e7e84 31d9740aac 69891a64a0 0940309600 a762b1ed60 1b9d9d3a8b d9908b3d61
c40b80436a 8f1e352bcc 18e769b5e5 27af6459b3 2c4bfab01a e689be552b ad80e7f48b d81b75b084
90f1431047 61ea7dabae 5d9f5f4a7d f956f612d3 3f5108268d 4c0dfc3f30 1670fe9b1c 300b28c0f2
e7038961ef 24e77a5211 9df039fbc2 143cd46a81 680e9871ed d5ece58f71 d7bbb5c4b7 cf9c991c79
0f0d96195d 3a562bb714 6381ba8478 9e3c14841a 1917091338 b1bb508554 0a68a48fc5 d3af6792d0
44dc3b743e b469d2832d d844026c29 21b4990652 39e24bdc97 bc66b98176 d6d3fb46cc 4040b334f5
d7e72519ef c7752f0be9 0ffe28a733 56f24fe317 341cde2781 33bb8d434d 9f813b7385 02a336a25d
88ed1446f4 c69312f128 c5bcff0e10 871d1e2440 1619afb938 25528913f1 7df532fa72 ef91441c76
aa6c56b63d 18e899d15e 63fa8924ae 0e13e3bd81 372c0ed457 071077200b 65579a2861 bb7603ae2a
cce67d274e 794329dcad e36fda3ff1 3832d33259 1f40c2ccf8 7350524456 a1a973a873 f2a915700c
e184f99617 ab07adb14f 6535c68276 dde2772e52 4a8fd309c5 b416849d9c bc321d8ced ac72c19d22
67fc2fd3c0 4acc59204c 07cadb59e0 6fa4741c81 f4bac2382c 67b72220c0 feeb14ea47 bdfb6f5f46
53491e9eaa f720de65fa 3d70162a8d d3a1bbc3d0 0078574ee6 7cfd313531 e7919e9a1b 98073202e9
8dee345f85 9161882f33 eef313665b 53e70fbfcb 05a1721499 2f772080b8 a5548c080c 7e0a1ecc80
3f2dcccc07 adc5965b32 45919fc0cf dd6f4c4844 bb47db033f 111ea78693 c17253589a 7e6156f5dd
d5cfb63f52 cab15055e7 9185910171 b4892e0caf 83e0cafef9 7cb75506c3 ac6970ad40 5a95cc236c
95c942f477 6088f2e573 fc705746c0 8182359fe4 e7ae15162c 12ca20432d 8b7406e168 9d6317f782
d8bdb73140 476db15431 20ce356296 ea594dcbc6 021b9746a8 c4615ae557 95a5089bdc cef1fba281
5c7859a258 986cdae5b0 3b11e28d6c eba63e8e76 2fc65e3b42 7d504ab2bf 216c7efd42 8c4149db16
20ac8f69ea 1db3d7a6fb b1c1138cf8 00b1a4f174 86cc665b58 e26dd578ef f1f3217052 8f14fd89ef
26e4d52a61 319c647147 f4cd93bd36 5a80bb1d2a 1126dcacf5 bdf9a73d19 1f73b83a79 73bd62c51e
9acd5c94e8 6e85eac14b 936baf676e 867f06d813 7f9f440789 5a15e64471 c9aecd51f3 6ca1d978d4
a18c73bd7c 26d86cbcb5 b24a5d9aca 5305bc1ceb a672f0f56c 9ab5e13e8f a49171f8cc 65d8dc412a
8600400632 11d10bee12 dbd16e8285 eb26787079 b0a7b1eb3d f994092d7f 946d8e5be5 6d7c2ae74a
1ba71b0b1b 47c3af6a0e e5527a5aa5 9bb0dcd73f 4159804052 adb27cf143 a4879d854f 8b92dfb889
eb4868cb6e e06e6e05ae 4fce4f81c7 ae11283574 15fc9aa483 2ebfb8e6a9 d098ea675f 8ad152e5fc
14077fcf51 b427573e19 46268f0dcf 006c178eb1 d63b20dabb fcf0a391ed 263b9c4b3e 1dc7355952
67e4a72a28 e6ea07f9b7 44a691ae29 290dbc43cb 219f1f9f3f 582170f26e 4e2dad7720 d002ec72ad
f6bb14f7c4 e1697848a5 4d48bba350 a690cc5564 be16f76034 ae4cf44728 4ac0df71b1 dbd948867c
a9b5cd6c31 92f513d514 b239d21961 40e8dd4a8d a667435ef2 042b4e7587 c46a1b4a59 e6035d5479
008d090093 6ff080c36b 086ca30323 7ed1ced521 801b9c1483 3990bebca3 8a2de1001f
@@ -20,12 +20,13 @@
"svelte.svelte-vscode",
"ardenivanov.svelte-intellisense",
"Prisma.prisma",
"bradlc.vscode-tailwindcss"
"bradlc.vscode-tailwindcss",
"waderyan.gitblame"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [3000, 3001],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "cp apps/api/.env.example pps/api/.env && pnpm install && pnpm db:push && pnpm db:seed",
"postCreateCommand": "cp apps/api/.env.example apps/api/.env && pnpm install && pnpm db:push && pnpm db:seed",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "node",
"features": {
.github/workflows/production-release.yml (vendored, 64 lines changed)
@@ -2,14 +2,14 @@ name: production-release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
types: [released]
|
||||
|
||||
jobs:
|
||||
making-something-cool:
|
||||
runs-on: ubuntu-latest
|
||||
arm64-build:
|
||||
runs-on: [self-hosted, arm64]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
@@ -26,11 +26,59 @@ jobs:
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
platforms: linux/arm64
|
||||
push: true
|
||||
tags: coollabsio/coolify:latest,coollabsio/coolify:${{steps.package-version.outputs.current-version}}
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache,mode=max
|
||||
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
|
||||
amd64-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
|
||||
merge-manifest:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [amd64-build, arm64-build]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Create & publish manifest
|
||||
run: |
|
||||
docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
|
||||
docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always()
|
||||
with:
|
||||
|
||||
.github/workflows/release-candidate.yml (vendored, new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
name: release-candidate
|
||||
on:
|
||||
release:
|
||||
types: [prereleased]
|
||||
|
||||
jobs:
|
||||
arm64-making-something-cool:
|
||||
runs-on: [self-hosted, arm64]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: "next"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/arm64
|
||||
push: true
|
||||
tags: coollabsio/coolify:${{github.event.release.name}}-arm64
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-rc-arm64
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-rc-arm64,mode=max
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always()
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
|
||||
amd64-making-something-cool:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: "next"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
tags: coollabsio/coolify:${{github.event.release.name}}-amd64
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-rc-amd64
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-rc-amd64,mode=max
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always()
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
|
||||
merge-manifest-to-be-cool:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [arm64-making-something-cool, amd64-making-something-cool]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Create & publish manifest
|
||||
run: |
|
||||
docker manifest create coollabsio/coolify:${{github.event.release.name}} --amend coollabsio/coolify:${{github.event.release.name}}-amd64 --amend coollabsio/coolify:${{github.event.release.name}}-arm64
|
||||
docker manifest push coollabsio/coolify:${{github.event.release.name}}
|
||||
|
||||
.github/workflows/staging-release.yml (vendored, 64 lines changed)
@@ -6,11 +6,13 @@ on:
|
||||
- next
|
||||
|
||||
jobs:
|
||||
staging-release:
|
||||
runs-on: ubuntu-latest
|
||||
arm64-making-something-cool:
|
||||
runs-on: [self-hosted, arm64]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: "next"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
@@ -20,15 +22,65 @@ jobs:
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/arm64
|
||||
push: true
|
||||
tags: coollabsio/coolify:next-arm64
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
|
||||
amd64-making-something-cool:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: "next"
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Get current package version
|
||||
uses: martinbeentjes/npm-get-version-action@v1.2.3
|
||||
id: package-version
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
tags: coollabsio/coolify:next
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next,mode=max
|
||||
tags: coollabsio/coolify:next-amd64,coollabsio/coolify:next-test
|
||||
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
|
||||
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
|
||||
merge-manifest-to-be-cool:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [arm64-making-something-cool, amd64-making-something-cool]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Create & publish manifest
|
||||
run: |
|
||||
docker manifest create coollabsio/coolify:next --amend coollabsio/coolify:next-amd64 --amend coollabsio/coolify:next-arm64
|
||||
docker manifest push coollabsio/coolify:next
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always()
|
||||
with:
|
||||
|
||||
.gitignore (vendored, 4 lines changed)
@@ -12,4 +12,6 @@ client
apps/api/db/*.db
local-serve
apps/api/db/migration.db-journal
apps/api/core*
apps/api/core*
logs
others/certificates
CONTRIBUTING.md (deleted, 256 lines)
@@ -1,256 +0,0 @@
|
||||
# 👋 Welcome
|
||||
|
||||
First of all, thank you for considering contributing to my project! It means a lot 💜.
|
||||
|
||||
|
||||
## 🙋 Want to help?
|
||||
|
||||
If you begin in GitHub contribution, you can find the [first contribution](https://github.com/firstcontributions/first-contributions) and follow this guide.
|
||||
|
||||
Follow the [introduction](#introduction) to get started then start contributing!
|
||||
|
||||
This is a little list of what you can do to help the project:
|
||||
|
||||
- [🧑💻 Develop your own ideas](#developer-contribution)
|
||||
- [🌐 Translate the project](#translation)
|
||||
|
||||
## 👋 Introduction
|
||||
|
||||
### Setup with Github codespaces
|
||||
|
||||
If you have github codespaces enabled then you can just create a codespace and run `pnpm dev` to run your the dev environment. All the required dependencies and packages has been configured for you already.
|
||||
|
||||
### Setup with Gitpod
|
||||
|
||||
If you have a [Gitpod](https://gitpod.io), you can just create a workspace from this repository, run `pnpm install && pnpm db:push && pnpm db:seed` and then `pnpm dev`. All the required dependencies and packages has been configured for you already.
|
||||
|
||||
### Setup locally in your machine
|
||||
|
||||
> 🔴 At the moment, Coolify **doesn't support Windows**. You must use Linux or MacOS. Consider using Gitpod or Github Codespaces.
|
||||
|
||||
#### Recommended Pull Request Guideline
|
||||
|
||||
- Fork the project
|
||||
- Clone your fork repo to local
|
||||
- Create a new branch
|
||||
- Push to your fork repo
|
||||
- Create a pull request: https://github.com/coollabsio/coolify/compare
|
||||
- Write a proper description
|
||||
- Open the pull request to review against `next` branch
|
||||
|
||||
---
|
||||
|
||||
# 🧑💻 Developer contribution
|
||||
## Technical skills required
|
||||
|
||||
- **Languages**: Node.js / Javascript / Typescript
|
||||
- **Framework JS/TS**: [SvelteKit](https://kit.svelte.dev/) & [Fastify](https://www.fastify.io/)
|
||||
- **Database ORM**: [Prisma.io](https://www.prisma.io/)
|
||||
- **Docker Engine API**
|
||||
|
||||
---
|
||||
|
||||
## How to start after you set up your local fork?
|
||||
|
||||
### Prerequisites
|
||||
1. Due to the lock file, this repository is best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!
|
||||
|
||||
2. You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
|
||||
3. You need to have [Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/) installed locally.
|
||||
4. You need to have [GIT LFS Support](https://git-lfs.github.com/) installed locally.
|
||||
|
||||
Optional:
|
||||
|
||||
4. To test Heroku buildpacks, you need [pack](https://github.com/buildpacks/pack) binary installed locally.
|
||||
|
||||
### Steps for local setup
|
||||
|
||||
1. Copy `apps/api/.env.template` to `apps/api/.env.template` and set the `COOLIFY_APP_ID` environment variable to something cool.
|
||||
2. Install dependencies with `pnpm install`.
|
||||
3. Need to create a local SQlite database with `pnpm db:push`.
|
||||
|
||||
This will apply all migrations at `db/dev.db`.
|
||||
|
||||
4. Seed the database with base entities with `pnpm db:seed`
|
||||
5. You can start coding after starting `pnpm dev`.
|
||||
|
||||
---
|
||||
|
||||
## Database migrations
|
||||
|
||||
During development, if you change the database layout, you need to run `pnpm db:push` to migrate the database and create types for Prisma. You also need to restart the development process.
|
||||
|
||||
If the schema is finalized, you need to create a migration file with `pnpm db:migrate <nameOfMigration>` where `nameOfMigration` is given by you. Make it sense. :)
|
||||
|
||||
---
|
||||
|
||||
## How to add new services
|
||||
|
||||
You can add any open-source and self-hostable software (service/application) to Coolify if the following statements are true:
|
||||
|
||||
- Self-hostable (obviously)
|
||||
- Open-source
|
||||
- Maintained (I do not want to add software full of bugs)
|
||||
|
||||
## Backend
|
||||
|
||||
There are 5 steps you should make on the backend side.
|
||||
|
||||
1. Create Prisma / database schema for the new service.
|
||||
2. Add supported versions of the service.
|
||||
3. Update global functions.
|
||||
4. Create API endpoints.
|
||||
5. Define automatically generated variables.
|
||||
|
||||
> I will use [Umami](https://umami.is/) as an example service.
|
||||
|
||||
### Create Prisma / Database schema for the new service.
|
||||
|
||||
You only need to do this if you store passwords or any persistent configuration. Mostly it is required by all services, but there are some exceptions, like NocoDB.
|
||||
|
||||
Update Prisma schema in [prisma/schema.prisma](prisma/schema.prisma).
|
||||
|
||||
- Add new model with the new service name.
|
||||
- Make a relationship with `Service` model.
|
||||
- In the `Service` model, the name of the new field should be with low-capital.
|
||||
- If the service needs a database, define a `publicPort` field to be able to make it's database public, example field name in case of PostgreSQL: `postgresqlPublicPort`. It should be a optional field.
|
||||
|
||||
If you are finished with the Prisma schema, you should update the database schema with `pnpm db:push` command.
|
||||
|
||||
> You must restart the running development environment to be able to use the new model
|
||||
|
||||
> If you use VSCode/TLS, you probably need to restart the `Typescript Language Server` to get the new types loaded in the running environment.
|
||||
|
||||
### Add supported versions
|
||||
|
||||
Supported versions are hardcoded into Coolify (for now).
|
||||
|
||||
You need to update `supportedServiceTypesAndVersions` function at [src/apps/api/src/lib/supportedVersions.ts](src/apps/api/src/lib/supportedVersions.ts). Example JSON:
|
||||
|
||||
```js
|
||||
{
|
||||
// Name used to identify the service internally
|
||||
name: 'umami',
|
||||
// Fancier name to show to the user
|
||||
fancyName: 'Umami',
|
||||
// Docker base image for the service
|
||||
baseImage: 'ghcr.io/mikecao/umami',
|
||||
// Optional: If there is any dependent image, you should list it here
|
||||
images: [],
|
||||
// Usable tags
|
||||
versions: ['postgresql-latest'],
|
||||
// Which tag is the recommended
|
||||
recommendedVersion: 'postgresql-latest',
|
||||
// Application's default port, Umami listens on 3000
|
||||
ports: {
|
||||
main: 3000
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Add required functions/properties
|
||||
|
||||
1. Add the new service to the `includeServices` variable in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts), so it will be included in all places in the database queries where it is required.
|
||||
|
||||
```js
|
||||
const include: any = {
|
||||
destinationDocker: true,
|
||||
persistentStorage: true,
|
||||
serviceSecret: true,
|
||||
minio: true,
|
||||
plausibleAnalytics: true,
|
||||
vscodeserver: true,
|
||||
wordpress: true,
|
||||
ghost: true,
|
||||
meiliSearch: true,
|
||||
umami: true // This line!
|
||||
};
|
||||
```
|
||||
|
||||
2. Update the database update query with the new service type to `configureServiceType` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts). This function defines the automatically generated variables (passwords, users, etc.) and it's encryption process (if applicable).
|
||||
|
||||
```js
|
||||
[...]
|
||||
else if (type === 'umami') {
|
||||
const postgresqlUser = cuid();
|
||||
const postgresqlPassword = encrypt(generatePassword());
|
||||
const postgresqlDatabase = 'umami';
|
||||
const hashSalt = encrypt(generatePassword(64));
|
||||
await prisma.service.update({
|
||||
where: { id },
|
||||
data: {
|
||||
type,
|
||||
umami: {
|
||||
create: {
|
||||
postgresqlDatabase,
|
||||
postgresqlPassword,
|
||||
postgresqlUser,
|
||||
hashSalt,
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
3. Add field details to [apps/api/src/lib/services/serviceFields.ts](apps/api/src/lib/services/serviceFields.ts), so every component will know what to do with the values (decrypt/show it by default/readonly)
|
||||
|
||||
```js
|
||||
export const umami = [{
|
||||
name: 'postgresqlUser',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
}]
|
||||
```
|
||||
|
||||
4. Add service deletion query to `removeService` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts)
|
||||
|
||||
|
||||
5. You need to add start process for the new service in [apps/api/src/routes/api/v1/services/handlers.ts](apps/api/src/routes/api/v1/services/handlers.ts)
|
||||
|
||||
> See startUmamiService() function as example.
|
||||
|
||||
|
||||
6. You need to add a custom logo at [apps/ui/src/lib/components/svg/services](apps/ui/src/lib/components/svg/services) as a svelte component and export it in [apps/ui/src/lib/components/svg/services/index.ts](apps/ui/src/lib/components/svg/services/index.ts)
|
||||
|
||||
SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.
|
||||
|
||||
7. You need to include it the logo at:
|
||||
|
||||
- [apps/ui/src/lib/components/svg/services/ServiceIcons.svelte](apps/ui/src/lib/components/svg/services/ServiceIcons.svelte) with `isAbsolute`.
|
||||
- [apps/ui/src/routes/services/[id]/_ServiceLinks.svelte](apps/ui/src/routes/services/[id]/_ServiceLinks.svelte) with the link to the docs/main site of the service
|
||||
|
||||
8. By default the URL and the name frontend forms are included in [apps/ui/src/routes/services/[id]/_Services/_Services.svelte](apps/ui/src/routes/services/[id]/_Services/_Services.svelte).
|
||||
|
||||
If you need to show more details on the frontend, such as users/passwords, you need to add Svelte component to [apps/ui/src/routes/services/[id]/_Services](apps/ui/src/routes/services/[id]/_Services) with an underscore.
|
||||
|
||||
> For example, see other [here](apps/ui/src/routes/services/[id]/_Services/_Umami.svelte).
|
||||
|
||||
|
||||
Good job! 👏
|
||||
|
||||
<!-- # 🌐 Translate the project
|
||||
|
||||
The project use [sveltekit-i18n](https://github.com/sveltekit-i18n/lib) to translate the project.
|
||||
It follows the [ISO 639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) to name languages.
|
||||
|
||||
### Installation
|
||||
|
||||
You must have gone throw all the [intro](#introduction) steps before you can start translating.
|
||||
|
||||
It's only an advice, but I recommend you to use:
|
||||
|
||||
- Visual Studio Code
|
||||
- [i18n Ally for Visual Studio Code](https://marketplace.visualstudio.com/items?itemName=Lokalise.i18n-ally): ideal to see the progress of the translation.
|
||||
- [Svelte for VS Code](https://marketplace.visualstudio.com/items?itemName=svelte.svelte-vscode): to get the syntax color for the project
|
||||
|
||||
### Adding a language
|
||||
|
||||
If your language doesn't appear in the [locales folder list](src/lib/locales/), follow the step below:
|
||||
|
||||
1. In `src/lib/locales/`, Copy paste `en.json` and rename it with your language (eg: `cz.json`).
|
||||
2. In the [lang.json](src/lib/lang.json) file, add a line after the first bracket (`{`) with `"ISO of your language": "Language",` (eg: `"cz": "Czech",`).
|
||||
3. Have fun translating! -->
|
||||
CONTRIBUTION.md (new file, 115 lines)
@@ -0,0 +1,115 @@
# Contribution

First of all, thank you for considering contributing to my project. It really means a lot! :)

You can ask for guidance anytime on our Discord server in the #contribution channel.

## Setup your development environment

### Github codespaces

If you have GitHub Codespaces enabled, you can just create a codespace and run `pnpm dev` to start the dev environment. All the required dependencies and packages have been configured for you already.

### Gitpod

If you use [Gitpod](https://gitpod.io), you can just create a workspace from this repository, run `pnpm install && pnpm db:push && pnpm db:seed` and then `pnpm dev`. All the required dependencies and packages have been configured for you already.

### Local Machine

> At the moment, Coolify `doesn't support Windows`. You must use `Linux` or `MacOS`, or consider using Gitpod or GitHub Codespaces.

- Due to the lock file, this repository works best with [pnpm](https://pnpm.io). I recommend you try and use `pnpm` because it is cool and efficient!

- You need to have [Docker Engine](https://docs.docker.com/engine/install/) installed locally.
- You need to have [Docker Compose Plugin](https://docs.docker.com/compose/install/compose-plugin/) installed locally.
- You need to have [GIT LFS Support](https://git-lfs.github.com/) installed locally.

Optional:
- To test Heroku buildpacks, you need the [pack](https://github.com/buildpacks/pack) binary installed locally.

### Inside a Docker container

`WIP`

## Setup Coolify

- Copy `apps/api/.env.template` to `apps/api/.env` and set the `COOLIFY_APP_ID` environment variable to something cool.
- `pnpm install` to install dependencies.
- `pnpm db:push` to create a local SQLite database.

This will apply all migrations at `db/dev.db`.

- `pnpm db:seed` to seed the database.
- `pnpm dev` to start coding.

## Technical skills required

- **Languages**: Node.js / Javascript / Typescript
- **Framework JS/TS**: [SvelteKit](https://kit.svelte.dev/) & [Fastify](https://www.fastify.io/)
- **Database ORM**: [Prisma.io](https://www.prisma.io/)
- **Docker Engine API**

## Add a new service

### Which service is eligible to be added to Coolify?

The following statements need to be true:

- Self-hostable
- Open-source
- Maintained (I do not want to add software full of bugs)

### Create Prisma / Database schema for the new service.

All data that needs to persist for a service should be saved to the database, either in `cleartext` or `encrypted`.

Every password/API key/passphrase needs to be encrypted. If you are not sure whether something should be encrypted or not, just encrypt it.

Update the Prisma schema in [src/apps/api/prisma/schema.prisma](https://github.com/coollabsio/coolify/blob/main/apps/api/prisma/schema.prisma).

- Add a new model with the new service name.
- Make a relationship with the `Service` model.
- In the `Service` model, the name of the new field should start with a lowercase letter.
- If the service needs a database, define a `publicPort` field to be able to make its database public, for example `postgresqlPublicPort` in the case of PostgreSQL. It should be an optional field.

Once done, apply the schema with `pnpm db:push`.
> You may also need to restart `Typescript Language Server` in your IDE to get the new types.

### Add available versions

Versions are hardcoded into Coolify at the moment and are based on Docker image tags.
- Update the `supportedServiceTypesAndVersions` function [here](apps/api/src/lib/services/supportedVersions.ts), following the example below.
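
For reference, this is the Umami entry from the removed CONTRIBUTING.md; a new service gets an analogous object (image, tags and port will of course differ):

```js
{
	// Name used to identify the service internally
	name: 'umami',
	// Fancier name to show to the user
	fancyName: 'Umami',
	// Docker base image for the service
	baseImage: 'ghcr.io/mikecao/umami',
	// Optional: If there is any dependent image, you should list it here
	images: [],
	// Usable tags
	versions: ['postgresql-latest'],
	// Which tag is the recommended
	recommendedVersion: 'postgresql-latest',
	// Application's default port, Umami listens on 3000
	ports: {
		main: 3000
	}
}
```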

### Include the new service in queries

In the `includeServices` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts), add the new table name so it is included in all the database queries where it is required, as in the example below.
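
The removed CONTRIBUTING.md shows what this looks like for Umami; the new table name is simply switched on in the shared `include` object:

```js
const include: any = {
	destinationDocker: true,
	persistentStorage: true,
	serviceSecret: true,
	minio: true,
	plausibleAnalytics: true,
	vscodeserver: true,
	wordpress: true,
	ghost: true,
	meiliSearch: true,
	umami: true // This line!
};
```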

### Define auto-generated fields

In the `configureServiceType` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts), add the initial auto-generated details (passwords, users, etc.) and the encryption of secrets (if applicable), as in the example below.
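
The Umami branch of `configureServiceType` from the removed CONTRIBUTING.md illustrates the pattern: generate the credentials, encrypt the secrets, and create the related row:

```js
[...]
else if (type === 'umami') {
	const postgresqlUser = cuid();
	const postgresqlPassword = encrypt(generatePassword());
	const postgresqlDatabase = 'umami';
	const hashSalt = encrypt(generatePassword(64));
	await prisma.service.update({
		where: { id },
		data: {
			type,
			umami: {
				create: {
					postgresqlDatabase,
					postgresqlPassword,
					postgresqlUser,
					hashSalt,
				}
			}
		}
	});
}
```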

### Define input field details

In [apps/api/src/lib/services/serviceFields.ts](apps/api/src/lib/services/serviceFields.ts), add details about the input fields shown in the UI, so every component (API/UI) knows what to do with the values (decrypt, show by default, read-only, etc.), as in the example below.
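
For example, the removed CONTRIBUTING.md describes Umami's `postgresqlUser` field like this; add one such object per input field:

```js
export const umami = [{
	name: 'postgresqlUser',
	isEditable: false,
	isLowerCase: false,
	isNumber: false,
	isBoolean: false,
	isEncrypted: false
}]
```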

### Define the start process

- In [apps/api/src/lib/services/handlers.ts](apps/api/src/lib/services/handlers.ts), define how the service should start. It can be complex and based on `docker-compose` definitions.

> See the `startUmamiService()` function as an example.

- In [apps/api/src/routes/api/v1/services/handlers.ts](apps/api/src/routes/api/v1/services/handlers.ts), add the new start process to the `startService` function (see the sketch after this list).
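
A minimal sketch of that wiring, assuming `startService` dispatches on the service type and `startUmamiService` is the handler written in the previous step (the real function in the repo may be structured differently):

```js
// Sketch only: dispatch the new service type to its start handler.
async function startService(request) {
	const { type } = request.params;
	// ...existing service types...
	if (type === 'umami') {
		return await startUmamiService(request);
	}
	// ...
}
```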

### Define the deletion process

In the `removeService` function in [apps/api/src/lib/services/common.ts](apps/api/src/lib/services/common.ts), add the database deletion step for the new service.
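
A minimal sketch of the deletion step, assuming the new Prisma model is called `umami` as in the examples above; `removeService` and its `id` argument are taken from the existing code:

```js
// Inside removeService: delete the service-specific row(s)
// before the Service record itself is removed.
await prisma.umami.deleteMany({ where: { serviceId: id } });
```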

### Custom logo

- In [apps/ui/src/lib/components/svg/services](apps/ui/src/lib/components/svg/services), add the service's custom logo as a Svelte component and export it from [apps/ui/src/lib/components/svg/services/index.ts](apps/ui/src/lib/components/svg/services/index.ts) (see the sketch after this list).

> SVG is recommended, but you can use PNG as well. It should have the `isAbsolute` variable with the suitable CSS classes, primarily for sizing and positioning.

- In [apps/ui/src/lib/components/svg/services/ServiceIcons.svelte](apps/ui/src/lib/components/svg/services/ServiceIcons.svelte), include the new logo with the `isAbsolute` property.

- In [apps/ui/src/routes/services/[id]/_ServiceLinks.svelte](apps/ui/src/routes/services/[id]/_ServiceLinks.svelte), add a link to the documentation of the service.
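
A sketch of the re-export step, assuming the logo component is saved as `Umami.svelte` (the exact export style used in the repo's index.ts may differ):

```js
// apps/ui/src/lib/components/svg/services/index.ts (sketch)
export { default as Umami } from './Umami.svelte';
```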

### Custom fields on the UI

By default, the URL and the name are shown on the UI. Everything else needs to be added in [apps/ui/src/routes/services/[id]/_Services/_Services.svelte](apps/ui/src/routes/services/[id]/_Services/_Services.svelte).

> If you need to show more details on the frontend, such as users/passwords, add a Svelte component to [apps/ui/src/routes/services/[id]/_Services](apps/ui/src/routes/services/[id]/_Services) with an underscore prefix. For example, see [_Umami.svelte](apps/ui/src/routes/services/[id]/_Services/_Umami.svelte).

Good job! 👏
Dockerfile (27 lines changed)
@@ -1,30 +1,26 @@
FROM node:18-alpine3.16 as build
ARG PNPM_VERSION=7.11.0
ARG NPM_VERSION=8.19.1

FROM node:18-slim as build
WORKDIR /app

RUN apk add --no-cache curl
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
RUN apt update && apt -y install curl
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}

COPY . .
RUN pnpm install
RUN pnpm build

# Production build
FROM node:18-alpine3.16
FROM node:18-slim
WORKDIR /app
ENV NODE_ENV production
ARG TARGETPLATFORM

ENV PRISMA_QUERY_ENGINE_BINARY=/app/prisma-engines/query-engine \
PRISMA_MIGRATION_ENGINE_BINARY=/app/prisma-engines/migration-engine \
PRISMA_INTROSPECTION_ENGINE_BINARY=/app/prisma-engines/introspection-engine \
PRISMA_FMT_BINARY=/app/prisma-engines/prisma-fmt \
PRISMA_CLI_QUERY_ENGINE_TYPE=binary \
PRISMA_CLIENT_ENGINE_TYPE=binary

COPY --from=coollabsio/prisma-engine:4.2.0 /prisma-engines/query-engine /prisma-engines/migration-engine /prisma-engines/introspection-engine /prisma-engines/prisma-fmt /app/prisma-engines/

RUN apk add --no-cache git git-lfs openssh-client curl jq cmake sqlite openssl psmisc
RUN curl -sL https://unpkg.com/@pnpm/self-installer | node
RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
RUN npm install -g npm@${PNPM_VERSION}

RUN mkdir -p ~/.docker/cli-plugins/
# https://download.docker.com/linux/static/stable/
@@ -37,6 +33,7 @@ RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker
RUN (curl -sSL "https://github.com/buildpacks/pack/releases/download/v0.27.0/pack-v0.27.0-linux.tgz" | tar -C /usr/local/bin/ --no-same-owner -xzv pack)

COPY --from=build /app/apps/api/build/ .
COPY --from=build /app/others/fluentbit/ ./fluentbit
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .
@@ -15,20 +15,23 @@
},
"dependencies": {
"@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.2.0",
"@fastify/autoload": "5.3.1",
"@fastify/cookie": "8.1.0",
"@fastify/cors": "8.1.0",
"@fastify/env": "4.1.0",
"@fastify/jwt": "6.3.2",
"@fastify/multipart": "7.2.0",
"@fastify/static": "6.5.0",
"@iarna/toml": "2.2.5",
"@ladjs/graceful": "3.0.2",
"@prisma/client": "3.15.2",
"@prisma/client": "4.3.1",
"axios": "0.27.2",
"bcryptjs": "2.4.3",
"bree": "9.1.2",
"cabin": "9.1.2",
"compare-versions": "5.0.1",
"csv-parse": "^5.3.0",
"csvtojson": "^2.0.10",
"cuid": "2.1.8",
"dayjs": "1.11.5",
"dockerode": "3.3.4",
@@ -37,7 +40,7 @@
"fastify": "4.5.3",
"fastify-plugin": "4.2.1",
"generate-password": "1.7.0",
"got": "12.3.1",
"got": "12.4.1",
"is-ip": "5.0.0",
"is-port-reachable": "4.0.0",
"js-yaml": "4.1.0",
@@ -47,25 +50,26 @@
"p-all": "4.0.0",
"p-throttle": "5.0.0",
"public-ip": "6.0.1",
"pump": "^3.0.0",
"ssh-config": "4.1.6",
"strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1"
},
"devDependencies": {
"@types/node": "18.7.13",
"@types/node": "18.7.15",
"@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.35.1",
"@typescript-eslint/parser": "5.35.1",
"esbuild": "0.15.5",
"@typescript-eslint/eslint-plugin": "5.36.2",
"@typescript-eslint/parser": "5.36.2",
"esbuild": "0.15.7",
"eslint": "8.23.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-prettier": "4.2.1",
"nodemon": "2.0.19",
"prettier": "2.7.1",
"prisma": "3.15.2",
"prisma": "4.3.1",
"rimraf": "3.0.2",
"tsconfig-paths": "4.1.0",
"typescript": "4.7.4"
"typescript": "4.8.2"
},
"prisma": {
"seed": "node prisma/seed.js"
@@ -0,0 +1,18 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Weblate" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"adminPassword" TEXT NOT NULL,
|
||||
"postgresqlHost" TEXT NOT NULL,
|
||||
"postgresqlPort" INTEGER NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Weblate_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Weblate_serviceId_key" ON "Weblate"("serviceId");
|
||||
@@ -0,0 +1,23 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Taiga" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"secretKey" TEXT NOT NULL,
|
||||
"erlangSecret" TEXT NOT NULL,
|
||||
"djangoAdminPassword" TEXT NOT NULL,
|
||||
"djangoAdminUser" TEXT NOT NULL,
|
||||
"rabbitMQUser" TEXT NOT NULL,
|
||||
"rabbitMQPassword" TEXT NOT NULL,
|
||||
"postgresqlHost" TEXT NOT NULL,
|
||||
"postgresqlPort" INTEGER NOT NULL,
|
||||
"postgresqlUser" TEXT NOT NULL,
|
||||
"postgresqlPassword" TEXT NOT NULL,
|
||||
"postgresqlDatabase" TEXT NOT NULL,
|
||||
"postgresqlPublicPort" INTEGER,
|
||||
"serviceId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "Taiga_serviceId_fkey" FOREIGN KEY ("serviceId") REFERENCES "Service" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Taiga_serviceId_key" ON "Taiga"("serviceId");
|
||||
@@ -0,0 +1,22 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
|
||||
"isBot" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,20 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to alter the column `time` on the `BuildLog` table. The data in that column could be lost. The data in that column will be cast from `Int` to `BigInt`.
|
||||
|
||||
*/
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_BuildLog" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT,
|
||||
"buildId" TEXT NOT NULL,
|
||||
"line" TEXT NOT NULL,
|
||||
"time" BIGINT NOT NULL
|
||||
);
|
||||
INSERT INTO "new_BuildLog" ("applicationId", "buildId", "id", "line", "time") SELECT "applicationId", "buildId", "id", "line", "time" FROM "BuildLog";
|
||||
DROP TABLE "BuildLog";
|
||||
ALTER TABLE "new_BuildLog" RENAME TO "BuildLog";
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
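
The migration above alters `BuildLog.time` from `Int` to `BigInt`, presumably because `time` stores millisecond Unix timestamps: Prisma's `Int` maps to a 32-bit signed integer, which is too small for them. A quick check (my illustration, not part of the migration):

```js
// A 32-bit signed Int tops out at 2_147_483_647,
// but millisecond timestamps are already around 1.7e12.
console.log(Date.now() > 2_147_483_647); // true
```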
@@ -0,0 +1,20 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "ApplicationConnectedDatabase" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"databaseId" TEXT,
|
||||
"hostedDatabaseType" TEXT,
|
||||
"hostedDatabaseHost" TEXT,
|
||||
"hostedDatabasePort" INTEGER,
|
||||
"hostedDatabaseName" TEXT,
|
||||
"hostedDatabaseUser" TEXT,
|
||||
"hostedDatabasePassword" TEXT,
|
||||
"hostedDatabaseDBName" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationConnectedDatabase_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "ApplicationConnectedDatabase_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ApplicationConnectedDatabase_applicationId_key" ON "ApplicationConnectedDatabase"("applicationId");
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Setting" ADD COLUMN "isAPIDebuggingEnabled" BOOLEAN DEFAULT false;
|
||||
@@ -0,0 +1,13 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "DatabaseSecret" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"databaseId" TEXT NOT NULL,
|
||||
CONSTRAINT "DatabaseSecret_databaseId_fkey" FOREIGN KEY ("databaseId") REFERENCES "Database" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "DatabaseSecret_name_databaseId_key" ON "DatabaseSecret"("name", "databaseId");
|
||||
@@ -0,0 +1,18 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "Build" ADD COLUMN "previewApplicationId" TEXT;
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "PreviewApplication" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"pullmergeRequestId" TEXT NOT NULL,
|
||||
"sourceBranch" TEXT NOT NULL,
|
||||
"isRandomDomain" BOOLEAN NOT NULL DEFAULT false,
|
||||
"customDomain" TEXT,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "PreviewApplication_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "PreviewApplication_applicationId_key" ON "PreviewApplication"("applicationId");
|
||||
@@ -0,0 +1,10 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "Certificate" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"key" TEXT NOT NULL,
|
||||
"cert" TEXT NOT NULL,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"teamId" TEXT,
|
||||
CONSTRAINT "Certificate_teamId_fkey" FOREIGN KEY ("teamId") REFERENCES "Team" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
@@ -0,0 +1,23 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_ApplicationSettings" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"applicationId" TEXT NOT NULL,
|
||||
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
|
||||
"debug" BOOLEAN NOT NULL DEFAULT false,
|
||||
"previews" BOOLEAN NOT NULL DEFAULT false,
|
||||
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
|
||||
"isBot" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
|
||||
DROP TABLE "ApplicationSettings";
|
||||
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
|
||||
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,26 @@
|
||||
-- RedefineTables
|
||||
PRAGMA foreign_keys=OFF;
|
||||
CREATE TABLE "new_GitSource" (
|
||||
"id" TEXT NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"forPublic" BOOLEAN NOT NULL DEFAULT false,
|
||||
"type" TEXT,
|
||||
"apiUrl" TEXT,
|
||||
"htmlUrl" TEXT,
|
||||
"customPort" INTEGER NOT NULL DEFAULT 22,
|
||||
"organization" TEXT,
|
||||
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" DATETIME NOT NULL,
|
||||
"githubAppId" TEXT,
|
||||
"gitlabAppId" TEXT,
|
||||
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
|
||||
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
|
||||
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
|
||||
);
|
||||
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "name", "organization", "type", "updatedAt" FROM "GitSource";
|
||||
DROP TABLE "GitSource";
|
||||
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
|
||||
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
|
||||
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
|
||||
PRAGMA foreign_key_check;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -1,6 +1,6 @@
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
binaryTargets = ["native", "linux-musl"]
|
||||
binaryTargets = ["native"]
|
||||
}
|
||||
|
||||
datasource db {
|
||||
@@ -8,9 +8,20 @@ datasource db {
|
||||
url = env("COOLIFY_DATABASE_URL")
|
||||
}
|
||||
|
||||
model Certificate {
|
||||
id String @id @default(cuid())
|
||||
key String
|
||||
cert String
|
||||
team Team? @relation(fields: [teamId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
teamId String?
|
||||
}
|
||||
|
||||
model Setting {
|
||||
id String @id @default(cuid())
|
||||
fqdn String? @unique
|
||||
isAPIDebuggingEnabled Boolean? @default(false)
|
||||
isRegistrationEnabled Boolean @default(false)
|
||||
dualCerts Boolean @default(false)
|
||||
minPort Int @default(9000)
|
||||
@@ -69,6 +80,7 @@ model Team {
|
||||
gitLabApps GitlabApp[]
|
||||
service Service[]
|
||||
users User[]
|
||||
certificate Certificate[]
|
||||
}
|
||||
|
||||
model TeamInvitation {
|
||||
@@ -117,6 +129,37 @@ model Application {
|
||||
settings ApplicationSettings?
|
||||
secrets Secret[]
|
||||
teams Team[]
|
||||
connectedDatabase ApplicationConnectedDatabase?
|
||||
previewApplication PreviewApplication[]
|
||||
}
|
||||
|
||||
model PreviewApplication {
|
||||
id String @id @default(cuid())
|
||||
pullmergeRequestId String
|
||||
sourceBranch String
|
||||
isRandomDomain Boolean @default(false)
|
||||
customDomain String?
|
||||
applicationId String @unique
|
||||
application Application @relation(fields: [applicationId], references: [id])
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model ApplicationConnectedDatabase {
|
||||
id String @id @default(cuid())
|
||||
applicationId String @unique
|
||||
databaseId String?
|
||||
hostedDatabaseType String?
|
||||
hostedDatabaseHost String?
|
||||
hostedDatabasePort Int?
|
||||
hostedDatabaseName String?
|
||||
hostedDatabaseUser String?
|
||||
hostedDatabasePassword String?
|
||||
hostedDatabaseDBName String?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
database Database? @relation(fields: [databaseId], references: [id])
|
||||
application Application @relation(fields: [applicationId], references: [id])
|
||||
}
|
||||
|
||||
model ApplicationSettings {
|
||||
@@ -128,6 +171,8 @@ model ApplicationSettings {
|
||||
autodeploy Boolean @default(true)
|
||||
isBot Boolean @default(false)
|
||||
isPublicRepository Boolean @default(false)
|
||||
isDBBranching Boolean @default(false)
|
||||
isCustomSSL Boolean @default(false)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
application Application @relation(fields: [applicationId], references: [id])
|
||||
@@ -186,25 +231,26 @@ model BuildLog {
|
||||
applicationId String?
|
||||
buildId String
|
||||
line String
|
||||
time Int
|
||||
time BigInt
|
||||
}

model Build {
id String @id @default(cuid())
type String
applicationId String?
destinationDockerId String?
gitSourceId String?
githubAppId String?
gitlabAppId String?
commit String?
pullmergeRequestId String?
forceRebuild Boolean @default(false)
sourceBranch String?
branch String?
status String? @default("queued")
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id @default(cuid())
type String
applicationId String?
destinationDockerId String?
gitSourceId String?
githubAppId String?
gitlabAppId String?
commit String?
pullmergeRequestId String?
previewApplicationId String?
forceRebuild Boolean @default(false)
sourceBranch String?
branch String?
status String? @default("queued")
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}

model DestinationDocker {
@@ -253,6 +299,7 @@ model GitSource {
updatedAt DateTime @updatedAt
githubAppId String? @unique
gitlabAppId String? @unique
isSystemWide Boolean @default(false)
gitlabApp GitlabApp? @relation(fields: [gitlabAppId], references: [id])
githubApp GithubApp? @relation(fields: [githubAppId], references: [id])
application Application[]
@@ -291,22 +338,36 @@ model GitlabApp {
}

model Database {
id String @id @default(cuid())
name String
publicPort Int?
defaultDatabase String?
type String?
version String?
dbUser String?
dbUserPassword String?
rootUser String?
rootUserPassword String?
destinationDockerId String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
settings DatabaseSettings?
teams Team[]
id String @id @default(cuid())
name String
publicPort Int?
defaultDatabase String?
type String?
version String?
dbUser String?
dbUserPassword String?
rootUser String?
rootUserPassword String?
destinationDockerId String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
destinationDocker DestinationDocker? @relation(fields: [destinationDockerId], references: [id])
settings DatabaseSettings?
teams Team[]
applicationConnectedDatabase ApplicationConnectedDatabase[]
databaseSecret DatabaseSecret[]
}

model DatabaseSecret {
id String @id @default(cuid())
name String
value String
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
databaseId String
database Database @relation(fields: [databaseId], references: [id])

@@unique([name, databaseId])
}
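The compound unique above makes idempotent writes straightforward. A sketch only, assuming Prisma's default composite-input name (name_databaseId) and that values are stored encrypted as elsewhere in the codebase:

await prisma.databaseSecret.upsert({
  where: { name_databaseId: { name: 'POSTGRES_PASSWORD', databaseId } },
  update: { value: encrypt(newValue) },
  create: { name: 'POSTGRES_PASSWORD', value: encrypt(newValue), databaseId }
});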

model DatabaseSettings {
@@ -348,6 +409,8 @@ model Service {
wordpress Wordpress?
appwrite Appwrite?
searxng Searxng?
weblate Weblate?
taiga Taiga?
}

model PlausibleAnalytics {
@@ -559,3 +622,38 @@ model Searxng {
updatedAt DateTime @updatedAt
service Service @relation(fields: [serviceId], references: [id])
}

model Weblate {
id String @id @default(cuid())
adminPassword String
postgresqlHost String
postgresqlPort Int
postgresqlUser String
postgresqlPassword String
postgresqlDatabase String
postgresqlPublicPort Int?
serviceId String @unique
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
service Service @relation(fields: [serviceId], references: [id])
}

model Taiga {
id String @id @default(cuid())
secretKey String
erlangSecret String
djangoAdminPassword String
djangoAdminUser String
rabbitMQUser String
rabbitMQPassword String
postgresqlHost String
postgresqlPort Int
postgresqlUser String
postgresqlPassword String
postgresqlDatabase String
postgresqlPublicPort Int?
serviceId String @unique
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
service Service @relation(fields: [serviceId], references: [id])
}

@@ -17,7 +17,6 @@ const algorithm = 'aes-256-ctr';

async function main() {
// Enable registration for the first user
// Set initial HAProxy password
const settingsFound = await prisma.setting.findFirst({});
if (!settingsFound) {
await prisma.setting.create({
@@ -25,7 +24,8 @@ async function main() {
isRegistrationEnabled: true,
proxyPassword: encrypt(generatePassword()),
proxyUser: cuid(),
arch: process.arch
arch: process.arch,
DNSServers: '1.1.1.1,8.8.8.8'
}
});
} else {
@@ -94,6 +94,16 @@ async function main() {
}
});
}
// Set new preview secrets
const secrets = await prisma.secret.findMany({ where: { isPRMRSecret: false } })
if (secrets.length > 0) {
for (const secret of secrets) {
const previewSecrets = await prisma.secret.findMany({ where: { applicationId: secret.applicationId, name: secret.name, isPRMRSecret: true } })
if (previewSecrets.length === 0) {
await prisma.secret.create({ data: { ...secret, id: undefined, isPRMRSecret: true } })
}
}
}
}
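With the seed above, every plain secret gains a PR/MR counterpart, so a preview deployment can read its values with a single filtered query (sketch only; applicationId assumed to be in scope):

const prSecrets = await prisma.secret.findMany({
  where: { applicationId, isPRMRSecret: true }
});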
main()
.catch((e) => {

@@ -3,9 +3,10 @@ import cors from '@fastify/cors';
import serve from '@fastify/static';
import env from '@fastify/env';
import cookie from '@fastify/cookie';
import multipart from '@fastify/multipart';
import path, { join } from 'path';
import autoLoad from '@fastify/autoload';
import { asyncExecShell, isDev, listSettings, prisma, version } from './lib/common';
import { asyncExecShell, createRemoteEngineConfiguration, getDomain, isDev, listSettings, prisma, version } from './lib/common';
import { scheduler } from './lib/scheduler';
import { compareVersions } from 'compare-versions';
import Graceful from '@ladjs/graceful'
@@ -26,119 +27,149 @@ declare module 'fastify' {

const port = isDev ? 3001 : 3000;
const host = '0.0.0.0';
const fastify = Fastify({
logger: false,
trustProxy: true
});
const schema = {
type: 'object',
required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
properties: {
COOLIFY_APP_ID: {
type: 'string',
},
COOLIFY_SECRET_KEY: {
type: 'string',
},
COOLIFY_DATABASE_URL: {
type: 'string',
default: 'file:../db/dev.db'
},
COOLIFY_SENTRY_DSN: {
type: 'string',
default: null
},
COOLIFY_IS_ON: {
type: 'string',
default: 'docker'
},
COOLIFY_WHITE_LABELED: {
type: 'string',
default: 'false'
},
COOLIFY_WHITE_LABELED_ICON: {
type: 'string',
default: null
},
COOLIFY_AUTO_UPDATE: {
type: 'string',
default: 'false'
},

}
};

const options = {
schema,
dotenv: true
};
fastify.register(env, options);
if (!isDev) {
fastify.register(serve, {
root: path.join(__dirname, './public'),
preCompressed: true
prisma.setting.findFirst().then(async (settings) => {
const fastify = Fastify({
logger: settings?.isAPIDebuggingEnabled || false,
trustProxy: true
});
fastify.setNotFoundHandler(async function (request, reply) {
if (request.raw.url && request.raw.url.startsWith('/api')) {
return reply.status(404).send({
success: false
});
}
return reply.status(200).sendFile('index.html');
});
}
fastify.register(autoLoad, {
dir: join(__dirname, 'plugins')
});
fastify.register(autoLoad, {
dir: join(__dirname, 'routes')
});

fastify.register(cookie)
fastify.register(cors);
fastify.listen({ port, host }, async (err: any, address: any) => {
if (err) {
console.error(err);
process.exit(1);
const schema = {
type: 'object',
required: ['COOLIFY_SECRET_KEY', 'COOLIFY_DATABASE_URL', 'COOLIFY_IS_ON'],
properties: {
COOLIFY_APP_ID: {
type: 'string',
},
COOLIFY_SECRET_KEY: {
type: 'string',
},
COOLIFY_DATABASE_URL: {
type: 'string',
default: 'file:../db/dev.db'
},
COOLIFY_SENTRY_DSN: {
type: 'string',
default: null
},
COOLIFY_IS_ON: {
type: 'string',
default: 'docker'
},
COOLIFY_WHITE_LABELED: {
type: 'string',
default: 'false'
},
COOLIFY_WHITE_LABELED_ICON: {
type: 'string',
default: null
},
COOLIFY_AUTO_UPDATE: {
type: 'string',
default: 'false'
},

}
};

const options = {
schema,
dotenv: true
};
fastify.register(env, options);
if (!isDev) {
fastify.register(serve, {
root: path.join(__dirname, './public'),
preCompressed: true
});
fastify.setNotFoundHandler(async function (request, reply) {
if (request.raw.url && request.raw.url.startsWith('/api')) {
return reply.status(404).send({
success: false
});
}
return reply.status(200).sendFile('index.html');
});
}
console.log(`Coolify's API is listening on ${host}:${port}`);
await initServer();
fastify.register(multipart, { limits: { fileSize: 100000 } });
fastify.register(autoLoad, {
dir: join(__dirname, 'plugins')
});
fastify.register(autoLoad, {
dir: join(__dirname, 'routes')
});
fastify.register(cookie)
fastify.register(cors);
fastify.addHook('onRequest', async (request, reply) => {
let allowedList = ['coolify:3000'];
const { ipv4, ipv6, fqdn } = await prisma.setting.findFirst({})

const graceful = new Graceful({ brees: [scheduler] });
graceful.listen();

setInterval(async () => {
if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication');
ipv4 && allowedList.push(`${ipv4}:3000`);
ipv6 && allowedList.push(ipv6);
fqdn && allowedList.push(getDomain(fqdn));
isDev && allowedList.push('localhost:3000') && allowedList.push('localhost:3001') && allowedList.push('host.docker.internal:3001');
const remotes = await prisma.destinationDocker.findMany({ where: { remoteEngine: true, remoteVerified: true } })
if (remotes.length > 0) {
remotes.forEach(remote => {
allowedList.push(`${remote.remoteIpAddress}:3000`);
})
}
if (!scheduler.workers.has('infrastructure')) {
scheduler.run('infrastructure');
if (!allowedList.includes(request.headers.host)) {
// console.log('not allowed', request.headers.host)
}
}, 2000)
})
fastify.listen({ port, host }, async (err: any, address: any) => {
if (err) {
console.error(err);
process.exit(1);
}
console.log(`Coolify's API is listening on ${host}:${port}`);
await initServer();

// autoUpdater
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:autoUpdater")
}, isDev ? 5000 : 60000 * 15)
const graceful = new Graceful({ brees: [scheduler] });
graceful.listen();

// cleanupStorage
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupStorage")
}, isDev ? 6000 : 60000 * 10)
setInterval(async () => {
if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication');
}
if (!scheduler.workers.has('infrastructure')) {
scheduler.run('infrastructure');
}
}, 2000)

// checkProxies
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkProxies")
}, 10000)
// autoUpdater
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:autoUpdater")
}, isDev ? 5000 : 60000 * 15)

// cleanupPrismaEngines
// setInterval(async () => {
// scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupPrismaEngines")
// }, 60000)
// cleanupStorage
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupStorage")
}, isDev ? 6000 : 60000 * 10)

// checkProxies and checkFluentBit
setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkProxies")
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:checkFluentBit")
}, 10000)

setInterval(async () => {
scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:copySSLCertificates")
}, 2000)

// cleanupPrismaEngines
// setInterval(async () => {
// scheduler.workers.has('infrastructure') && scheduler.workers.get('infrastructure').postMessage("action:cleanupPrismaEngines")
// }, 60000)

await Promise.all([
getArch(),
getIPAddress(),
configureRemoteDockers(),
])
});
})

await getArch();
await getIPAddress();
});
async function getIPAddress() {
const { publicIpv4, publicIpv6 } = await import('public-ip')
try {
@@ -175,4 +206,15 @@ async function getArch() {
} catch (error) { }
}


async function configureRemoteDockers() {
try {
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteVerified: true, remoteEngine: true }
});
if (remoteDocker.length > 0) {
for (const docker of remoteDocker) {
await createRemoteEngineConfiguration(docker.id)
}
}
} catch (error) { }
}

@@ -38,8 +38,16 @@ import * as buildpacks from '../lib/buildPacks';
for (const queueBuild of queuedBuilds) {
actions.push(async () => {
let application = await prisma.application.findUnique({ where: { id: queueBuild.applicationId }, include: { destinationDocker: true, gitSource: { include: { githubApp: true, gitlabApp: true } }, persistentStorage: true, secrets: true, settings: true, teams: true } })
let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, forceRebuild } = queueBuild
let { id: buildId, type, sourceBranch = null, pullmergeRequestId = null, previewApplicationId = null, forceRebuild } = queueBuild
application = decryptApplication(application)
const originalApplicationId = application.id
if (pullmergeRequestId) {
const previewApplications = await prisma.previewApplication.findMany({ where: { applicationId: originalApplicationId, pullmergeRequestId } })
if (previewApplications.length > 0) {
previewApplicationId = previewApplications[0].id
}
}
const usableApplicationId = previewApplicationId || originalApplicationId
try {
if (queueBuild.status === 'running') {
await saveBuildLog({ line: 'Building halted, restarting...', buildId, applicationId: application.id });
@@ -104,17 +112,17 @@ import * as buildpacks from '../lib/buildPacks';
)
.digest('hex');
const { debug } = settings;
if (concurrency === 1) {
await prisma.build.updateMany({
where: {
status: { in: ['queued', 'running'] },
id: { not: buildId },
applicationId,
createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
},
data: { status: 'failed' }
});
}
// if (concurrency === 1) {
// await prisma.build.updateMany({
// where: {
// status: { in: ['queued', 'running'] },
// id: { not: buildId },
// applicationId,
// createdAt: { lt: new Date(new Date().getTime() - 10 * 1000) }
// },
// data: { status: 'failed' }
// });
// }
let imageId = applicationId;
let domain = getDomain(fqdn);
const volumes =
@@ -146,7 +154,7 @@ import * as buildpacks from '../lib/buildPacks';
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
baseDirectory = configuration.baseDirectory;
baseDirectory = configuration.baseDirectory || '';
dockerFileLocation = configuration.dockerFileLocation;
denoMainFile = configuration.denoMainFile;
const commit = await importers[gitSource.type]({
@@ -177,9 +185,7 @@ import * as buildpacks from '../lib/buildPacks';

try {
await prisma.build.update({ where: { id: buildId }, data: { commit } });
} catch (err) {
console.log(err);
}
} catch (err) { }

if (!pullmergeRequestId) {
if (configHash !== currentHash) {
@@ -205,7 +211,6 @@ import * as buildpacks from '../lib/buildPacks';
//
}
await copyBaseConfigurationFiles(buildPack, workdir, buildId, applicationId, baseImage);

if (forceRebuild) deployNeeded = true
if (!imageFound || deployNeeded) {
// if (true) {
@@ -263,7 +268,10 @@ import * as buildpacks from '../lib/buildPacks';
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
envs.push(`${secret.name}=${isSecretFound[0].value}`);
} else {
envs.push(`${secret.name}=${secret.value}`);
}
} else {
@@ -337,10 +345,15 @@ import * as buildpacks from '../lib/buildPacks';
await saveBuildLog({ line: 'Deployment successful!', buildId, applicationId });
} catch (error) {
await saveBuildLog({ line: error, buildId, applicationId });
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
throw new Error(error);
}
await saveBuildLog({ line: 'Proxy will be updated shortly.', buildId, applicationId });
@@ -352,28 +365,29 @@ import * as buildpacks from '../lib/buildPacks';
}
}
catch (error) {
await prisma.build.updateMany({
where: { id: buildId, status: { in: ['queued', 'running'] } },
data: { status: 'failed' }
});
await saveBuildLog({ line: error, buildId, applicationId: application.id });
const foundBuild = await prisma.build.findUnique({ where: { id: buildId } })
if (foundBuild) {
await prisma.build.update({
where: { id: buildId },
data: {
status: 'failed'
}
});
}
if (error !== 1) {
await saveBuildLog({ line: error, buildId, applicationId: application.id });
}
}
});

}

await pAll.default(actions, { concurrency })
}
} catch (error) {
console.log(error)
} finally {
}
})

while (true) {
await th()
}


} else process.exit(0);
})();
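The PR/MR-aware secret selection used above reappears in several build packs below; a hypothetical helper like this approximates the same rule in one place (it is not part of the diff):

function resolveSecretValue(secrets: any[], secret: any, pullmergeRequestId?: string): string | null {
  if (pullmergeRequestId) {
    if (!secret.isPRMRSecret) return null; // preview builds only take PR/MR variants
    const found = secrets.find((s) => s.name === secret.name && s.isPRMRSecret);
    return found ? found.value : secret.value;
  }
  return secret.isPRMRSecret ? null : secret.value; // regular builds skip PR/MR variants
}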

@@ -1,11 +1,9 @@
import { parentPort } from 'node:worker_threads';
import axios from 'axios';
import { compareVersions } from 'compare-versions';
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, listSettings, version } from '../lib/common';

async function disconnect() {
await prisma.$disconnect();
}
import { asyncExecShell, cleanupDockerStorage, executeDockerCmd, isDev, prisma, startTraefikTCPProxy, generateDatabaseConfiguration, startTraefikProxy, listSettings, version, createRemoteEngineConfiguration, decrypt, executeSSHCmd } from '../lib/common';
import { checkContainer } from '../lib/docker';
import fs from 'fs/promises'
async function autoUpdater() {
try {
const currentVersion = version;
@@ -24,19 +22,84 @@ async function autoUpdater() {
const activeCount = 0
if (activeCount === 0) {
if (!isDev) {
console.log(`Updating Coolify to ${latestVersion}.`);
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
await asyncExecShell(`env | grep COOLIFY > .env`);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
);
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
await asyncExecShell(`env | grep COOLIFY > .env`);
await asyncExecShell(
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
);
await asyncExecShell(
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
);
}
} else {
console.log('Updating (not really in dev mode).');
}
}
}
} catch (error) { }
}
async function checkFluentBit() {
if (!isDev) {
const engine = '/var/run/docker.sock';
const { id } = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify' }
});
const { found } = await checkContainer({ dockerId: id, container: 'coolify-fluentbit' });
if (!found) {
await asyncExecShell(`env | grep COOLIFY > .env`);
await asyncExecShell(`docker compose up -d fluent-bit`);
}
}
}
async function copyRemoteCertificates(id: string, dockerId: string, remoteIpAddress: string) {
try {
await asyncExecShell(`scp /tmp/${id}-cert.pem /tmp/${id}-key.pem ${remoteIpAddress}:/tmp/`)
await executeSSHCmd({ dockerId, command: `docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'` })
await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/` })
await executeSSHCmd({ dockerId, command: `docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/` })
} catch (error) {
console.log(error);
console.log({ error })
}
}
async function copyLocalCertificates(id: string) {
try {
await asyncExecShell(`docker exec coolify-proxy sh -c 'test -d /etc/traefik/acme/custom/ || mkdir -p /etc/traefik/acme/custom/'`)
await asyncExecShell(`docker cp /tmp/${id}-key.pem coolify-proxy:/etc/traefik/acme/custom/`)
await asyncExecShell(`docker cp /tmp/${id}-cert.pem coolify-proxy:/etc/traefik/acme/custom/`)
} catch (error) {
console.log({ error })
}
}
async function copySSLCertificates() {
try {
const pAll = await import('p-all');
const actions = []
const certificates = await prisma.certificate.findMany({ include: { team: true } })
const teamIds = certificates.map(c => c.teamId)
const destinations = await prisma.destinationDocker.findMany({ where: { isCoolifyProxyUsed: true, teams: { some: { id: { in: [...teamIds] } } } } })
for (const certificate of certificates) {
const { id, key, cert } = certificate
const decryptedKey = decrypt(key)
await fs.writeFile(`/tmp/${id}-key.pem`, decryptedKey)
await fs.writeFile(`/tmp/${id}-cert.pem`, cert)
for (const destination of destinations) {
if (destination.remoteEngine) {
if (destination.remoteVerified) {
const { id: dockerId, remoteIpAddress } = destination
actions.push(async () => copyRemoteCertificates(id, dockerId, remoteIpAddress))
}
} else {
actions.push(async () => copyLocalCertificates(id))
}
}
}
await pAll.default(actions, { concurrency: 1 })
} catch (error) {
console.log(error)
} finally {
await asyncExecShell(`find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete`)
}
}
async function checkProxies() {
@@ -45,18 +108,35 @@ async function checkProxies() {
let portReachable;

const { arch, ipv4, ipv6 } = await listSettings();

// Coolify Proxy local
const engine = '/var/run/docker.sock';
const localDocker = await prisma.destinationDocker.findFirst({
where: { engine, network: 'coolify' }
where: { engine, network: 'coolify', isCoolifyProxyUsed: true }
});
if (localDocker && localDocker.isCoolifyProxyUsed) {
if (localDocker) {
portReachable = await isReachable(80, { host: ipv4 || ipv6 })
if (!portReachable) {
await startTraefikProxy(localDocker.id);
}
}

// Coolify Proxy remote
const remoteDocker = await prisma.destinationDocker.findMany({
where: { remoteEngine: true, remoteVerified: true }
});
if (remoteDocker.length > 0) {
for (const docker of remoteDocker) {
if (docker.isCoolifyProxyUsed) {
portReachable = await isReachable(80, { host: docker.remoteIpAddress })
if (!portReachable) {
await startTraefikProxy(docker.id);
}
}
try {
await createRemoteEngineConfiguration(docker.id)
} catch (error) { }
}
}
// TCP Proxies
const databasesWithPublicPort = await prisma.database.findMany({
where: { publicPort: { not: null } },
@@ -113,9 +193,7 @@ async function cleanupPrismaEngines() {
if (stdout.trim() != null && stdout.trim() != '' && Number(stdout.trim()) > 1) {
await asyncExecShell(`killall -q -e /app/prisma-engines/query-engine -o 1m`)
}
} catch (error) {
console.log(error);
}
} catch (error) { }
}
}
async function cleanupStorage() {
@@ -166,9 +244,7 @@ async function cleanupStorage() {
lowDiskSpace = true;
}
}
} catch (error) {
console.log(error);
}
} catch (error) { }
await cleanupDockerStorage(destination.id, lowDiskSpace, false)
}
}
@@ -176,7 +252,8 @@ async function cleanupStorage() {
(async () => {
let status = {
cleanupStorage: false,
autoUpdater: false
autoUpdater: false,
copySSLCertificates: false,
}
if (parentPort) {
parentPort.on('message', async (message) => {
@@ -202,6 +279,18 @@ async function cleanupStorage() {
await checkProxies();
return;
}
if (message === 'action:checkFluentBit') {
await checkFluentBit();
return;
}
if (message === 'action:copySSLCertificates') {
if (!status.copySSLCertificates) {
status.copySSLCertificates = true
await copySSLCertificates();
status.copySSLCertificates = false
}
return;
}
if (message === 'action:autoUpdater') {
if (!status.cleanupStorage) {
status.autoUpdater = true

@@ -1,4 +1,4 @@
import { base64Encode, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { base64Encode, encrypt, executeDockerCmd, generateTimestamp, getDomain, isDev, prisma, version } from "../common";
import { promises as fs } from 'fs';
import { day } from "../dayjs";

@@ -89,6 +89,22 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
}
];
const phpVersions = [
{
value: 'webdevops/php-apache:8.2',
label: 'webdevops/php-apache:8.2'
},
{
value: 'webdevops/php-nginx:8.2',
label: 'webdevops/php-nginx:8.2'
},
{
value: 'webdevops/php-apache:8.1',
label: 'webdevops/php-apache:8.1'
},
{
value: 'webdevops/php-nginx:8.1',
label: 'webdevops/php-nginx:8.1'
},
{
value: 'webdevops/php-apache:8.0',
label: 'webdevops/php-apache:8.0'
@@ -145,6 +161,22 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
value: 'webdevops/php-nginx:5.6',
label: 'webdevops/php-nginx:5.6'
},
{
value: 'webdevops/php-apache:8.2-alpine',
label: 'webdevops/php-apache:8.2-alpine'
},
{
value: 'webdevops/php-nginx:8.2-alpine',
label: 'webdevops/php-nginx:8.2-alpine'
},
{
value: 'webdevops/php-apache:8.1-alpine',
label: 'webdevops/php-apache:8.1-alpine'
},
{
value: 'webdevops/php-nginx:8.1-alpine',
label: 'webdevops/php-nginx:8.1-alpine'
},
{
value: 'webdevops/php-apache:8.0-alpine',
label: 'webdevops/php-apache:8.0-alpine'
@@ -305,18 +337,18 @@ export function setDefaultBaseImage(buildPack: string | null, deploymentType: st
payload.baseImage = 'denoland/deno:latest';
}
if (buildPack === 'php') {
payload.baseImage = 'webdevops/php-apache:8.0-alpine';
payload.baseImage = 'webdevops/php-apache:8.2-alpine';
payload.baseImages = phpVersions;
}
if (buildPack === 'laravel') {
payload.baseImage = 'webdevops/php-apache:8.0-alpine';
payload.baseImage = 'webdevops/php-apache:8.2-alpine';
payload.baseImages = phpVersions;
payload.baseBuildImage = 'node:18';
payload.baseBuildImages = nodeVersions;
}
if (buildPack === 'heroku') {
payload.baseImage = 'heroku/buildpacks:20';
payload.baseImages = herokuVersions;

}
return payload;
}
@@ -352,7 +384,7 @@ export const setDefaultConfiguration = async (data: any) => {
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (!baseDirectory.endsWith('/')) baseDirectory = `${baseDirectory}/`;
if (baseDirectory.endsWith('/') && baseDirectory !== '/') baseDirectory = baseDirectory.slice(0, -1);
}
if (dockerFileLocation) {
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
@@ -429,17 +461,33 @@ export const saveBuildLog = async ({
buildId: string;
applicationId: string;
}): Promise<any> => {
const { default: got } = await import('got')

if (line && typeof line === 'string' && line.includes('ghs_')) {
const regex = /ghs_.*@/g;
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
}
const addTimestamp = `[${generateTimestamp()}] ${line}`;
if (isDev) console.debug(`[${applicationId}] ${addTimestamp}`);
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
const fluentBitUrl = isDev ? 'http://localhost:24224' : 'http://coolify-fluentbit:24224';

if (isDev) {
console.debug(`[${applicationId}] ${addTimestamp}`);
}
try {
return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
json: {
line: encrypt(line)
}
})
} catch (error) {
if (isDev) return
return await prisma.buildLog.create({
data: {
line: addTimestamp, buildId, time: Number(day().valueOf()), applicationId
}
});
}

};
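A quick illustration of the ghs_ scrubbing above (the values are made up): a GitHub App clone URL loses its installation token before the log line leaves the builder.

const raw = 'https://x-access-token:ghs_exampletoken123@github.com/org/repo.git';
const scrubbed = raw.replace(/ghs_.*@/g, '<SENSITIVE_DATA_DELETED>@');
// scrubbed === 'https://x-access-token:<SENSITIVE_DATA_DELETED>@github.com/org/repo.git'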

export async function copyBaseConfigurationFiles(
@@ -512,7 +560,6 @@ export async function copyBaseConfigurationFiles(
);
}
} catch (error) {
console.log(error);
throw new Error(error);
}
}
@@ -525,7 +572,6 @@ export function checkPnpm(installCommand = null, buildCommand = null, startComma
);
}


export async function buildImage({
applicationId,
tag,
@@ -541,9 +587,9 @@ export async function buildImage({
} else {
await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
}
if (!debug && isCache) {
if (!debug) {
await saveBuildLog({
line: `Debug turned off. To see more details, allow it in the configuration.`,
line: `Debug turned off. To see more details, allow it in the features tab.`,
buildId,
applicationId
});
@@ -562,30 +608,6 @@ export async function buildImage({
}
}

export async function streamEvents({ stream, docker, buildId, applicationId, debug }) {
await new Promise((resolve, reject) => {
docker.engine.modem.followProgress(stream, onFinished, onProgress);
function onFinished(err, res) {
if (err) reject(err);
resolve(res);
}
async function onProgress(event) {
if (event.error) {
reject(event.error);
} else if (event.stream) {
if (event.stream !== '\n') {
if (debug)
await saveBuildLog({
line: `${event.stream.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
});
}

export function makeLabelForStandaloneApplication({
applicationId,
fqdn,
@@ -646,8 +668,6 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
secrets,
pullmergeRequestId
} = data;


const isPnpm = checkPnpm(installCommand, buildCommand);
const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${imageForBuild}`);
@@ -657,7 +677,10 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
@@ -675,6 +698,7 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
if (installCommand) {
Dockerfile.push(`RUN ${installCommand}`);
}
// Dockerfile.push(`ARG CACHEBUST=1`);
Dockerfile.push(`RUN ${buildCommand}`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
@@ -691,7 +715,10 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -13,40 +13,33 @@ export default async function (data) {
pullmergeRequestId,
dockerFileLocation
} = data
try {
const file = `${workdir}${dockerFileLocation}`;
let dockerFileOut = `${workdir}`;
if (baseDirectory) {
dockerFileOut = `${workdir}${baseDirectory}`;
workdir = `${workdir}${baseDirectory}`;
}
const Dockerfile: Array<string> = (await fs.readFile(`${file}`, 'utf8'))
.toString()
.trim()
.split('\n');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (
(pullmergeRequestId && secret.isPRMRSecret) ||
(!pullmergeRequestId && !secret.isPRMRSecret)
) {
Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);
const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
data.workdir = `${workdir}${baseDirectory}`;
const DockerfileRaw = await fs.readFile(`${file}`, 'utf8')
const Dockerfile: Array<string> = DockerfileRaw
.toString()
.trim()
.split('\n');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (
(pullmergeRequestId && secret.isPRMRSecret) ||
(!pullmergeRequestId && !secret.isPRMRSecret)
) {
Dockerfile.unshift(`ARG ${secret.name}=${secret.value}`);

Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
}
});
}
Dockerfile.forEach((line, index) => {
if (line.startsWith('FROM')) {
Dockerfile.splice(index + 1, 0, `ARG ${secret.name}`);
}
});
}
});
}

await fs.writeFile(`${dockerFileOut}${dockerFileLocation}`, Dockerfile.join('\n'));
await buildImage(data);
} catch (error) {
throw error;
}
});
}

await fs.writeFile(`${workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
await buildImage(data);
}
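A worked example of the path handling above, with illustrative values that are not taken from the diff; after the updated normalization in setDefaultConfiguration, baseDirectory keeps a leading slash and drops any trailing one.

const workdir = '/tmp/build/xyz';
const baseDirectory = '/backend';
const dockerFileLocation = '/Dockerfile';
const file = `${workdir}${baseDirectory}${dockerFileLocation}`; // '/tmp/build/xyz/backend/Dockerfile'
const buildContext = `${workdir}${baseDirectory}`;              // '/tmp/build/xyz/backend'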

@@ -2,38 +2,16 @@ import { executeDockerCmd, prisma } from "../common"
import { saveBuildLog } from "./common";

export default async function (data: any): Promise<void> {
const { buildId, applicationId, tag, dockerId, debug, workdir } = data
const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory } = data
try {

await saveBuildLog({ line: `Building image started.`, buildId, applicationId });
const { stdout } = await executeDockerCmd({
await executeDockerCmd({
debug,
dockerId,
command: `pack build -p ${workdir} ${applicationId}:${tag} --builder heroku/buildpacks:20`
command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder heroku/buildpacks:20`
})
if (debug) {
const array = stdout.split('\n')
for (const line of array) {
if (line !== '\n') {
await saveBuildLog({
line: `${line.replace('\n', '')}`,
buildId,
applicationId
});
}
}
}
await saveBuildLog({ line: `Building image successful.`, buildId, applicationId });
} catch (error) {
const array = error.stdout.split('\n')
for (const line of array) {
if (line !== '\n') {
await saveBuildLog({
line: `${line.replace('\n', '')}`,
buildId,
applicationId
});
}
}
throw error;
}
}

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -23,7 +23,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -27,7 +27,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -16,7 +16,10 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -21,7 +21,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

@@ -24,7 +24,10 @@ const createDockerfile = async (data, image): Promise<void> => {
secrets.forEach((secret) => {
if (secret.isBuildSecret) {
if (pullmergeRequestId) {
if (secret.isPRMRSecret) {
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
if (isSecretFound.length > 0) {
Dockerfile.push(`ARG ${secret.name}=${isSecretFound[0].value}`);
} else {
Dockerfile.push(`ARG ${secret.name}=${secret.value}`);
}
} else {

File diff suppressed because it is too large
@@ -13,7 +13,7 @@ export function formatLabelsOnDocker(data) {
return container
})
}
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<boolean> {
export async function checkContainer({ dockerId, container, remove = false }: { dockerId: string, container: string, remove?: boolean }): Promise<{ found: boolean, status?: { isExited: boolean, isRunning: boolean, isRestarting: boolean } }> {
let containerFound = false;
try {
const { stdout } = await executeDockerCmd({
@@ -21,10 +21,12 @@ export async function checkContainer({ dockerId, container, remove = false }: {
command:
`docker inspect --format '{{json .State}}' ${container}`
});

containerFound = true
const parsedStdout = JSON.parse(stdout);
const status = parsedStdout.Status;
const isRunning = status === 'running';
const isRestarting = status === 'restarting'
const isExited = status === 'exited'
if (status === 'created') {
await executeDockerCmd({
dockerId,
@@ -39,13 +41,23 @@ export async function checkContainer({ dockerId, container, remove = false }: {
`docker rm ${container}`
});
}
if (isRunning) {
containerFound = true;
}

return {
found: containerFound,
status: {
isRunning,
isRestarting,
isExited

}
};
} catch (err) {
// Container not found
}
return containerFound;
return {
found: false
};

}
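Callers now destructure the richer result instead of a bare boolean; a hedged usage sketch, with dockerId assumed to come from a destinationDocker record:

const { found, status } = await checkContainer({ dockerId, container: 'coolify-proxy' });
if (found && status?.isRunning) {
  // container exists and is running
} else if (found && status?.isExited) {
  // container exists but has stopped; it may need a restart
} else if (!found) {
  // container does not exist on this engine
}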

export async function isContainerExited(dockerId: string, containerName: string): Promise<boolean> {
@@ -76,7 +88,6 @@ export async function removeContainer({
await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
}
} catch (error) {
console.log(error);
throw error;
}
}

@@ -10,7 +10,8 @@ export default async function ({
branch,
buildId,
privateSshKey,
customPort
customPort,
forPublic
}: {
applicationId: string;
workdir: string;
@@ -21,11 +22,15 @@ export default async function ({
repodir: string;
privateSshKey: string;
customPort: number;
forPublic: boolean;
}): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
await saveBuildLog({ line: 'GitLab importer started.', buildId, applicationId });
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);

if (!forPublic) {
await asyncExecShell(`echo '${privateSshKey}' > ${repodir}/id.rsa`);
await asyncExecShell(`chmod 600 ${repodir}/id.rsa`);
}

await saveBuildLog({
line: `Cloning ${repository}:${branch} branch.`,
@@ -33,9 +38,16 @@ export default async function ({
applicationId
});

await asyncExecShell(
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
if (forPublic) {
await asyncExecShell(
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
} else {
await asyncExecShell(
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git submodule update --init --recursive && git lfs pull && cd .. `
);
}

const { stdout: commit } = await asyncExecShell(`cd ${workdir}/ && git rev-parse HEAD`);
return commit.replace('\n', '');
}
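A hedged call sketch for the importer above; gitlabImporter stands in for however the default export is imported, the option object is abbreviated, and the forPublic flag decides between the https and ssh clone paths.

const commit = await gitlabImporter({
  applicationId, workdir, repodir, buildId,      // assumed to be in scope
  htmlUrl: 'https://gitlab.com',
  repository: 'group/project',
  branch: 'main',
  privateSshKey: '',                             // not used when forPublic is true
  customPort: 22,
  forPublic: true                                // clone over https and skip the id.rsa setup
});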

@@ -9,8 +9,8 @@ Bree.extend(TSBree);

const options: any = {
defaultExtension: 'js',
// logger: new Cabin(),
logger: false,
logger: new Cabin(),
// logger: false,
workerMessageHandler: async ({ name, message }) => {
if (name === 'deployApplication' && message?.deploying) {
if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {

@@ -1,23 +1,7 @@
import { exec } from 'node:child_process'
import util from 'util';
import fs from 'fs/promises';
import yaml from 'js-yaml';
import forge from 'node-forge';
import { uniqueNamesGenerator, adjectives, colors, animals } from 'unique-names-generator';
import type { Config } from 'unique-names-generator';
import generator from 'generate-password';
import crypto from 'crypto';
import { promises as dns } from 'dns';
import { PrismaClient } from '@prisma/client';

import cuid from 'cuid';
import os from 'os';
import sshConfig from 'ssh-config'
import { encrypt, generatePassword, prisma } from '../common';


export const version = '3.8.2';
export const isDev = process.env.NODE_ENV === 'development';

export const includeServices: any = {
destinationDocker: true,
persistentStorage: true,
@@ -34,7 +18,9 @@ export const includeServices: any = {
moodle: true,
appwrite: true,
glitchTip: true,
searxng: true
searxng: true,
weblate: true,
taiga: true,
};
export async function configureServiceType({
id,
@@ -312,6 +298,58 @@ export async function configureServiceType({
}
}
});
} else if (type === 'weblate') {
const adminPassword = encrypt(generatePassword({}))
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'weblate';
await prisma.service.update({
where: { id },
data: {
type,
weblate: {
create: {
adminPassword,
postgresqlHost: `${id}-postgresql`,
postgresqlPort: 5432,
postgresqlUser,
postgresqlPassword,
postgresqlDatabase,
}
}
}
});
} else if (type === 'taiga') {
const secretKey = encrypt(generatePassword({}))
const erlangSecret = encrypt(generatePassword({}))
const rabbitMQUser = cuid();
const djangoAdminUser = cuid();
const djangoAdminPassword = encrypt(generatePassword({}))
const rabbitMQPassword = encrypt(generatePassword({}))
const postgresqlUser = cuid();
const postgresqlPassword = encrypt(generatePassword({}));
const postgresqlDatabase = 'taiga';
await prisma.service.update({
where: { id },
data: {
type,
taiga: {
create: {
secretKey,
erlangSecret,
djangoAdminUser,
djangoAdminPassword,
rabbitMQUser,
rabbitMQPassword,
postgresqlHost: `${id}-postgresql`,
postgresqlPort: 5432,
postgresqlUser,
postgresqlPassword,
postgresqlDatabase,
}
}
}
});
} else {
await prisma.service.update({
where: { id },
@@ -338,7 +376,8 @@ export async function removeService({ id }: { id: string }): Promise<void> {
await prisma.moodle.deleteMany({ where: { serviceId: id } });
await prisma.appwrite.deleteMany({ where: { serviceId: id } });
await prisma.searxng.deleteMany({ where: { serviceId: id } });
await prisma.weblate.deleteMany({ where: { serviceId: id } });
await prisma.taiga.deleteMany({ where: { serviceId: id } });


await prisma.service.delete({ where: { id } });
}
File diff suppressed because it is too large
@@ -599,6 +599,54 @@ export const glitchTip = [{
isBoolean: false,
isEncrypted: true
},
{
name: 'emailSmtpHost',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailSmtpPassword',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: true
},
{
name: 'emailSmtpUseSsl',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: true,
isEncrypted: false
},
{
name: 'emailSmtpPort',
isEditable: true,
isLowerCase: false,
isNumber: true,
isBoolean: false,
isEncrypted: false
},
{
name: 'emailSmtpUser',
isEditable: true,
isLowerCase: false,
isNumber: false,
isBoolean: false,
isEncrypted: false
},
{
name: 'defaultEmail',
isEditable: false,
@@ -624,7 +672,7 @@ export const glitchTip = [{
isEncrypted: true
},
{
name: 'defaultFromEmail',
name: 'defaultEmailFrom',
isEditable: true,
isLowerCase: false,
isNumber: false,
@@ -687,4 +735,133 @@ export const searxng = [{
isNumber: false,
isBoolean: false,
isEncrypted: true
}]
|
||||
|
||||
export const weblate = [{
|
||||
name: 'adminPassword',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: true
|
||||
},
|
||||
{
|
||||
name: 'postgresqlHost',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'postgresqlPort',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'postgresqlUser',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'postgresqlPassword',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: true
|
||||
},
|
||||
{
|
||||
name: 'postgresqlDatabase',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
}]
|
||||
export const taiga = [{
|
||||
name: 'secretKey',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: true
|
||||
},
|
||||
{
|
||||
name: 'djangoAdminUser',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'djangoAdminPassword',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: true
|
||||
},
|
||||
{
|
||||
name: 'rabbitMQUser',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'rabbitMQPassword',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: true
|
||||
},
|
||||
{
|
||||
name: 'postgresqlHost',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'postgresqlPort',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'postgresqlUser',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
},
|
||||
{
|
||||
name: 'postgresqlPassword',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: true
|
||||
},
|
||||
{
|
||||
name: 'postgresqlDatabase',
|
||||
isEditable: false,
|
||||
isLowerCase: false,
|
||||
isNumber: false,
|
||||
isBoolean: false,
|
||||
isEncrypted: false
|
||||
}]
|
||||
@@ -1,3 +1,24 @@
/*
    Example of a supported version:
    {
        // Name used to identify the service internally
        name: 'umami',
        // Fancier name to show to the user
        fancyName: 'Umami',
        // Docker base image for the service
        baseImage: 'ghcr.io/mikecao/umami',
        // Optional: If there is any dependent image, you should list it here
        images: [],
        // Usable tags
        versions: ['postgresql-latest'],
        // Which tag is the recommended
        recommendedVersion: 'postgresql-latest',
        // Application's default port, Umami listens on 3000
        ports: {
            main: 3000
        }
    }
*/
export const supportedServiceTypesAndVersions = [
    {
        name: 'plausibleanalytics',
@@ -116,7 +137,7 @@ export const supportedServiceTypesAndVersions = [
    {
        name: 'umami',
        fancyName: 'Umami',
        baseImage: 'ghcr.io/mikecao/umami',
        baseImage: 'ghcr.io/umami-software/umami',
        images: ['postgres:12-alpine'],
        versions: ['postgresql-latest'],
        recommendedVersion: 'postgresql-latest',
@@ -151,8 +172,8 @@ export const supportedServiceTypesAndVersions = [
        fancyName: 'Appwrite',
        baseImage: 'appwrite/appwrite',
        images: ['mariadb:10.7', 'redis:6.2-alpine', 'appwrite/telegraf:1.4.0'],
        versions: ['latest', '0.15.3'],
        recommendedVersion: '0.15.3',
        versions: ['latest', '1.0', '0.15.3'],
        recommendedVersion: '1.0',
        ports: {
            main: 80
        }
@@ -190,4 +211,48 @@ export const supportedServiceTypesAndVersions = [
            main: 8080
        }
    },
    {
        name: 'weblate',
        fancyName: 'Weblate',
        baseImage: 'weblate/weblate',
        images: ['postgres:14-alpine', 'redis:6-alpine'],
        versions: ['latest'],
        recommendedVersion: 'latest',
        ports: {
            main: 8080
        }
    },
    // {
    //     name: 'taiga',
    //     fancyName: 'Taiga',
    //     baseImage: 'taigaio/taiga-front',
    //     images: ['postgres:12.3', 'rabbitmq:3.8-management-alpine', 'taigaio/taiga-back', 'taigaio/taiga-events', 'taigaio/taiga-protected'],
    //     versions: ['latest'],
    //     recommendedVersion: 'latest',
    //     ports: {
    //         main: 80
    //     }
    // },
    {
        name: 'grafana',
        fancyName: 'Grafana',
        baseImage: 'grafana/grafana',
        images: [],
        versions: ['latest', '9.1.3', '9.1.2', '9.0.8', '8.3.11', '8.4.11', '8.5.11'],
        recommendedVersion: 'latest',
        ports: {
            main: 3000
        }
    },
    {
        name: 'trilium',
        fancyName: 'Trilium Notes',
        baseImage: 'zadam/trilium',
        images: [],
        versions: ['latest'],
        recommendedVersion: 'latest',
        ports: {
            main: 8080
        }
    },
];
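
// A small sketch of how a definition from supportedServiceTypesAndVersions might be looked up and
// its recommended tag resolved; findServiceDefinition is illustrative, not an existing export.
function findServiceDefinition(name: string) {
    const found = supportedServiceTypesAndVersions.find((service) => service.name === name);
    if (!found) throw new Error(`Unsupported service type: ${name}`);
    return { image: found.baseImage, tag: found.recommendedVersion, port: found.ports.main };
}
// e.g. findServiceDefinition('grafana') -> { image: 'grafana/grafana', tag: 'latest', port: 3000 }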
@@ -21,7 +21,6 @@ export default fp<FastifyJWTOptions>(async (fastify, opts) => {
        try {
            await request.jwtVerify()
        } catch (err) {
            console.log(err)
            reply.send(err)
        }
    })
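
    // With the onRequest hook above, every route registered on this instance only runs after
    // request.jwtVerify() succeeds; on failure the error is sent back as the reply. A minimal
    // sketch of a route that relies on that guarantee (route path and payload are illustrative):
    fastify.get('/protected', async (request) => {
        // request.user is populated by @fastify/jwt once jwtVerify() has succeeded.
        return { user: (request as any).user };
    });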
@@ -3,16 +3,24 @@ import crypto from 'node:crypto'
|
||||
import jsonwebtoken from 'jsonwebtoken';
|
||||
import axios from 'axios';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import fs from 'fs/promises';
|
||||
import yaml from 'js-yaml';
|
||||
import csv from 'csvtojson';
|
||||
|
||||
import { day } from '../../../../lib/dayjs';
|
||||
import { setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
|
||||
import { checkDomainsIsValidInDNS, checkDoubleBranch, checkExposedPort, decrypt, encrypt, errorHandler, executeDockerCmd, generateSshKeyPair, getContainerUsage, getDomain, getFreeExposedPort, isDev, isDomainConfigured, listSettings, prisma, stopBuild, uniqueName } from '../../../../lib/common';
|
||||
import { makeLabelForStandaloneApplication, setDefaultBaseImage, setDefaultConfiguration } from '../../../../lib/buildPacks/common';
|
||||
import { checkDomainsIsValidInDNS, checkDoubleBranch, checkExposedPort, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateSshKeyPair, getContainerUsage, getDomain, isDev, isDomainConfigured, listSettings, prisma, stopBuild, uniqueName } from '../../../../lib/common';
|
||||
import { checkContainer, formatLabelsOnDocker, isContainerExited, removeContainer } from '../../../../lib/docker';
|
||||
import { scheduler } from '../../../../lib/scheduler';
|
||||
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication } from './types';
|
||||
import type { GetImages, CancelDeployment, CheckDNS, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, GetApplicationLogs, GetBuildIdLogs, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, DeployApplication, CheckDomain, StopPreviewApplication, RestartPreviewApplication, GetBuilds } from './types';
|
||||
import { OnlyId } from '../../../../types';
|
||||
|
||||
function filterObject(obj, callback) {
|
||||
return Object.fromEntries(Object.entries(obj).
|
||||
filter(([key, val]) => callback(val, key)));
|
||||
}
|
||||
|
||||
export async function listApplications(request: FastifyRequest) {
|
||||
try {
|
||||
const { teamId } = request.user
|
||||
@@ -61,20 +69,62 @@ export async function getImages(request: FastifyRequest<GetImages>) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function cleanupUnconfiguredApplications(request: FastifyRequest<any>) {
|
||||
try {
|
||||
const teamId = request.user.teamId
|
||||
let applications = await prisma.application.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true },
|
||||
});
|
||||
for (const application of applications) {
|
||||
if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn)) {
|
||||
if (application?.destinationDockerId && application.destinationDocker?.network) {
|
||||
const { stdout: containers } = await executeDockerCmd({
|
||||
dockerId: application.destinationDocker.id,
|
||||
command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${application.id} --format '{{json .}}'`
|
||||
})
|
||||
if (containers) {
|
||||
const containersArray = containers.trim().split('\n');
|
||||
for (const container of containersArray) {
|
||||
const containerObj = JSON.parse(container);
|
||||
const id = containerObj.ID;
|
||||
await removeContainer({ id, dockerId: application.destinationDocker.id });
|
||||
}
|
||||
}
|
||||
}
|
||||
await prisma.applicationSettings.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.buildLog.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.build.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.secret.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.application.deleteMany({ where: { id: application.id } });
|
||||
}
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
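
// cleanupUnconfiguredApplications relies on `docker ps ... --format '{{json .}}'`, which prints
// one JSON object per line. The same parsing step in isolation; parseDockerPsJson is an
// illustrative helper, not an existing export.
function parseDockerPsJson(stdout: string): Array<{ ID: string; Names?: string; State?: string }> {
    if (!stdout.trim()) return [];
    return stdout
        .trim()
        .split('\n')
        .map((line) => JSON.parse(line));
}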
|
||||
export async function getApplicationStatus(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
let isRunning = false;
|
||||
let isExited = false;
|
||||
|
||||
let isRestarting = false;
|
||||
const application: any = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId) {
|
||||
isRunning = await checkContainer({ dockerId: application.destinationDocker.id, container: id });
|
||||
isExited = await isContainerExited(application.destinationDocker.id, id);
|
||||
const status = await checkContainer({ dockerId: application.destinationDocker.id, container: id });
|
||||
if (status?.found) {
|
||||
isRunning = status.status.isRunning;
|
||||
isExited = status.status.isExited;
|
||||
isRestarting = status.status.isRestarting
|
||||
}
|
||||
}
|
||||
return {
|
||||
isRunning,
|
||||
isRestarting,
|
||||
isExited,
|
||||
};
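// Inferred from the call sites above: checkContainer() now resolves to an object shaped roughly
// like { found: boolean, status?: { isRunning: boolean, isExited: boolean, isRestarting: boolean } }
// instead of a plain boolean. This shape is deduced from this diff, not taken from the lib/docker source.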
|
||||
} catch ({ status, message }) {
|
||||
@@ -149,7 +199,9 @@ export async function getApplicationFromDB(id: string, teamId: string) {
|
||||
settings: true,
|
||||
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||
secrets: true,
|
||||
persistentStorage: true
|
||||
persistentStorage: true,
|
||||
connectedDatabase: true,
|
||||
previewApplication: true
|
||||
}
|
||||
});
|
||||
if (!application) {
|
||||
@@ -176,32 +228,39 @@ export async function getApplicationFromDB(id: string, teamId: string) {
|
||||
}
|
||||
export async function getApplicationFromDBWebhook(projectId: number, branch: string) {
|
||||
try {
|
||||
let application = await prisma.application.findFirst({
|
||||
let applications = await prisma.application.findMany({
|
||||
where: { projectId, branch, settings: { autodeploy: true } },
|
||||
include: {
|
||||
destinationDocker: true,
|
||||
settings: true,
|
||||
gitSource: { include: { githubApp: true, gitlabApp: true } },
|
||||
secrets: true,
|
||||
persistentStorage: true
|
||||
persistentStorage: true,
|
||||
connectedDatabase: true
|
||||
}
|
||||
});
|
||||
if (!application) {
|
||||
if (applications.length === 0) {
|
||||
throw { status: 500, message: 'Application not configured.' }
|
||||
}
|
||||
application = decryptApplication(application);
|
||||
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
|
||||
application.buildPack
|
||||
);
|
||||
applications = applications.map((application: any) => {
|
||||
application = decryptApplication(application);
|
||||
const { baseImage, baseBuildImage, baseBuildImages, baseImages } = setDefaultBaseImage(
|
||||
application.buildPack
|
||||
);
|
||||
|
||||
// Set default build images
|
||||
if (!application.baseImage) {
|
||||
application.baseImage = baseImage;
|
||||
}
|
||||
if (!application.baseBuildImage) {
|
||||
application.baseBuildImage = baseBuildImage;
|
||||
}
|
||||
return { ...application, baseBuildImages, baseImages };
|
||||
// Set default build images
|
||||
if (!application.baseImage) {
|
||||
application.baseImage = baseImage;
|
||||
}
|
||||
if (!application.baseBuildImage) {
|
||||
application.baseBuildImage = baseBuildImage;
|
||||
}
|
||||
application.baseBuildImages = baseBuildImages;
|
||||
application.baseImages = baseImages;
|
||||
return application
|
||||
})
|
||||
|
||||
return applications;
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
@@ -229,15 +288,16 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType
|
||||
deploymentType,
|
||||
baseDatabaseBranch
|
||||
} = request.body
|
||||
if (port) port = Number(port);
|
||||
if (exposePort) {
|
||||
exposePort = Number(exposePort);
|
||||
}
|
||||
|
||||
const { destinationDocker: { id: dockerId, remoteIpAddress } } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } })
|
||||
if (exposePort) await checkExposedPort({ id, exposePort, dockerId, remoteIpAddress })
|
||||
const { destinationDocker: { engine, remoteEngine, remoteIpAddress }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } })
|
||||
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress })
|
||||
if (denoOptions) denoOptions = denoOptions.trim();
|
||||
const defaultConfiguration = await setDefaultConfiguration({
|
||||
buildPack,
|
||||
@@ -250,22 +310,43 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
|
||||
dockerFileLocation,
|
||||
denoMainFile
|
||||
});
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: {
|
||||
name,
|
||||
fqdn,
|
||||
exposePort,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
...defaultConfiguration
|
||||
}
|
||||
});
|
||||
if (baseDatabaseBranch) {
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: {
|
||||
name,
|
||||
fqdn,
|
||||
exposePort,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
...defaultConfiguration,
|
||||
connectedDatabase: { update: { hostedDatabaseDBName: baseDatabaseBranch } }
|
||||
}
|
||||
});
|
||||
} else {
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: {
|
||||
name,
|
||||
fqdn,
|
||||
exposePort,
|
||||
pythonWSGI,
|
||||
pythonModule,
|
||||
pythonVariable,
|
||||
denoOptions,
|
||||
baseImage,
|
||||
baseBuildImage,
|
||||
deploymentType,
|
||||
...defaultConfiguration
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
@@ -276,15 +357,10 @@ export async function saveApplication(request: FastifyRequest<SaveApplication>,
|
||||
export async function saveApplicationSettings(request: FastifyRequest<SaveApplicationSettings>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { debug, previews, dualCerts, autodeploy, branch, projectId, isBot } = request.body
|
||||
const isDouble = await checkDoubleBranch(branch, projectId);
|
||||
if (isDouble && autodeploy) {
|
||||
await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false } })
|
||||
throw { status: 500, message: 'Cannot activate automatic deployments until only one application is defined for this repository / branch.' }
|
||||
}
|
||||
const { debug, previews, dualCerts, autodeploy, branch, projectId, isBot, isDBBranching, isCustomSSL } = request.body
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: { fqdn: isBot ? null : undefined, settings: { update: { debug, previews, dualCerts, autodeploy, isBot } } },
|
||||
data: { fqdn: isBot ? null : undefined, settings: { update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching, isCustomSSL } } },
|
||||
include: { destinationDocker: true }
|
||||
});
|
||||
return reply.code(201).send();
|
||||
@@ -302,16 +378,127 @@ export async function stopPreviewApplication(request: FastifyRequest<StopPreview
|
||||
if (application?.destinationDockerId) {
|
||||
const container = `${id}-${pullmergeRequestId}`
|
||||
const { id: dockerId } = application.destinationDocker;
|
||||
const found = await checkContainer({ dockerId, container });
|
||||
const { found } = await checkContainer({ dockerId, container });
|
||||
if (found) {
|
||||
await removeContainer({ id: container, dockerId: application.destinationDocker.id });
|
||||
}
|
||||
await prisma.previewApplication.deleteMany({ where: { applicationId: application.id, pullmergeRequestId } })
|
||||
}
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
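
// Preview deployments are addressed as `${applicationId}-${pullmergeRequestId}` throughout this
// file. A tiny illustrative helper for that naming convention (not an existing export):
function previewContainerName(applicationId: string, pullmergeRequestId: string): string {
    return `${applicationId}-${pullmergeRequestId}`;
}
// e.g. previewContainerName('clxyz123', '42') === 'clxyz123-42'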
|
||||
|
||||
export async function restartApplication(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
let application: any = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId) {
|
||||
const buildId = cuid();
|
||||
const { id: dockerId, network } = application.destinationDocker;
|
||||
const { secrets, pullmergeRequestId, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
|
||||
|
||||
const envs = [
|
||||
`PORT=${port}`
|
||||
];
|
||||
if (secrets.length > 0) {
|
||||
secrets.forEach((secret) => {
|
||||
if (pullmergeRequestId) {
|
||||
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
|
||||
if (isSecretFound.length > 0) {
|
||||
envs.push(`${secret.name}=${isSecretFound[0].value}`);
|
||||
} else {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
} else {
|
||||
if (!secret.isPRMRSecret) {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
const { workdir } = await createDirectories({ repository, buildId });
|
||||
const labels = []
|
||||
let image = null
|
||||
const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}' --format '{{json .}}'` })
|
||||
const containersArray = container.trim().split('\n');
|
||||
for (const container of containersArray) {
|
||||
const containerObj = formatLabelsOnDocker(container);
|
||||
image = containerObj[0].Image
|
||||
Object.keys(containerObj[0].Labels).forEach(function (key) {
|
||||
if (key.startsWith('coolify')) {
|
||||
labels.push(`${key}=${containerObj[0].Labels[key]}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
let imageFound = false;
|
||||
try {
|
||||
await executeDockerCmd({
|
||||
dockerId,
|
||||
command: `docker image inspect ${image}`
|
||||
})
|
||||
imageFound = true;
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
if (!imageFound) {
|
||||
throw { status: 500, message: 'Image not found, cannot restart application.' }
|
||||
}
|
||||
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
|
||||
|
||||
let envFound = false;
|
||||
try {
|
||||
envFound = !!(await fs.stat(`${workdir}/.env`));
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
const volumes =
|
||||
persistentStorage?.map((storage) => {
|
||||
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
|
||||
}${storage.path}`;
|
||||
}) || [];
|
||||
const composeVolumes = volumes.map((volume) => {
|
||||
return {
|
||||
[`${volume.split(':')[0]}`]: {
|
||||
name: volume.split(':')[0]
|
||||
}
|
||||
};
|
||||
});
|
||||
const composeFile = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[applicationId]: {
|
||||
image,
|
||||
container_name: applicationId,
|
||||
volumes,
|
||||
env_file: envFound ? [`${workdir}/.env`] : [],
|
||||
labels,
|
||||
depends_on: [],
|
||||
expose: [port],
|
||||
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
|
||||
...defaultComposeConfiguration(network),
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: Object.assign({}, ...composeVolumes)
|
||||
};
|
||||
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
|
||||
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}` })
|
||||
await executeDockerCmd({ dockerId, command: `docker rm ${id}` })
|
||||
await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
|
||||
return reply.code(201).send();
|
||||
}
|
||||
throw { status: 500, message: 'Application cannot be restarted.' }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
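
// restartApplication spreads defaultComposeConfiguration(network) into the generated service. Its
// real contents live in lib/common and are not part of this diff; one plausible shape, judging
// only from how the compose file is used here, is sketched below (an assumption, not the actual
// implementation):
function defaultComposeConfigurationSketch(network: string) {
    return {
        networks: [network],
        restart: 'always'
    };
}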
|
||||
export async function stopApplication(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
@@ -319,7 +506,7 @@ export async function stopApplication(request: FastifyRequest<OnlyId>, reply: Fa
|
||||
const application: any = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId) {
|
||||
const { id: dockerId } = application.destinationDocker;
|
||||
const found = await checkContainer({ dockerId, container: id });
|
||||
const { found } = await checkContainer({ dockerId, container: id });
|
||||
if (found) {
|
||||
await removeContainer({ id, dockerId: application.destinationDocker.id });
|
||||
}
|
||||
@@ -332,12 +519,14 @@ export async function stopApplication(request: FastifyRequest<OnlyId>, reply: Fa
|
||||
export async function deleteApplication(request: FastifyRequest<DeleteApplication>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { force } = request.body
|
||||
|
||||
const { teamId } = request.user
|
||||
const application = await prisma.application.findUnique({
|
||||
where: { id },
|
||||
include: { destinationDocker: true }
|
||||
});
|
||||
if (application?.destinationDockerId && application.destinationDocker?.network) {
|
||||
if (!force && application?.destinationDockerId && application.destinationDocker?.network) {
|
||||
const { stdout: containers } = await executeDockerCmd({
|
||||
dockerId: application.destinationDocker.id,
|
||||
command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
|
||||
@@ -356,6 +545,7 @@ export async function deleteApplication(request: FastifyRequest<DeleteApplicatio
|
||||
await prisma.build.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.secret.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: id } });
|
||||
if (teamId === '0') {
|
||||
await prisma.application.deleteMany({ where: { id } });
|
||||
} else {
|
||||
@@ -379,20 +569,22 @@ export async function checkDomain(request: FastifyRequest<CheckDomain>) {
|
||||
export async function checkDNS(request: FastifyRequest<CheckDNS>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
|
||||
let { exposePort, fqdn, forceSave, dualCerts } = request.body
|
||||
|
||||
if (fqdn) fqdn = fqdn.toLowerCase();
|
||||
if (!fqdn) {
|
||||
return {}
|
||||
} else {
|
||||
fqdn = fqdn.toLowerCase();
|
||||
}
|
||||
if (exposePort) exposePort = Number(exposePort);
|
||||
|
||||
const { destinationDocker: { id: dockerId, remoteIpAddress, remoteEngine }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } })
|
||||
const { destinationDocker: { engine, remoteIpAddress, remoteEngine }, exposePort: configuredPort } = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } })
|
||||
const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
|
||||
|
||||
const found = await isDomainConfigured({ id, fqdn, remoteIpAddress });
|
||||
if (found) {
|
||||
throw { status: 500, message: `Domain ${getDomain(fqdn).replace('www.', '')} is already in use!` }
|
||||
}
|
||||
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, dockerId, remoteIpAddress })
|
||||
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress })
|
||||
if (isDNSCheckEnabled && !isDev && !forceSave) {
|
||||
let hostname = request.hostname.split(':')[0];
|
||||
if (remoteEngine) hostname = remoteIpAddress;
|
||||
@@ -458,7 +650,7 @@ export async function deployApplication(request: FastifyRequest<DeployApplicatio
|
||||
githubAppId: application.gitSource?.githubApp?.id,
|
||||
gitlabAppId: application.gitSource?.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'manual'
|
||||
type: pullmergeRequestId ? application.gitSource?.githubApp?.id ? 'manual_pr' : 'manual_mr' : 'manual'
|
||||
}
|
||||
});
|
||||
return {
|
||||
@@ -558,12 +750,12 @@ export async function saveRepository(request, reply) {
|
||||
data: { repository, branch, projectId, settings: { update: { autodeploy, isPublicRepository } } }
|
||||
});
|
||||
}
|
||||
if (!isPublicRepository) {
|
||||
const isDouble = await checkDoubleBranch(branch, projectId);
|
||||
if (isDouble) {
|
||||
await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false, isPublicRepository } })
|
||||
}
|
||||
}
|
||||
// if (!isPublicRepository) {
|
||||
// const isDouble = await checkDoubleBranch(branch, projectId);
|
||||
// if (isDouble) {
|
||||
// await prisma.applicationSettings.updateMany({ where: { application: { branch, projectId } }, data: { autodeploy: false, isPublicRepository } })
|
||||
// }
|
||||
// }
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
@@ -606,7 +798,20 @@ export async function saveBuildPack(request, reply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { buildPack } = request.body
|
||||
await prisma.application.update({ where: { id }, data: { buildPack } });
|
||||
const { baseImage, baseBuildImage } = setDefaultBaseImage(
|
||||
buildPack
|
||||
);
|
||||
await prisma.application.update({ where: { id }, data: { buildPack, baseImage, baseBuildImage } });
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveConnectedDatabase(request, reply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { databaseId, type } = request.body
|
||||
await prisma.application.update({ where: { id }, data: { connectedDatabase: { upsert: { create: { database: { connect: { id: databaseId } }, hostedDatabaseType: type }, update: { database: { connect: { id: databaseId } }, hostedDatabaseType: type } } } } })
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
@@ -616,55 +821,79 @@ export async function saveBuildPack(request, reply) {
|
||||
export async function getSecrets(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
|
||||
let secrets = await prisma.secret.findMany({
|
||||
where: { applicationId: id },
|
||||
orderBy: { createdAt: 'desc' }
|
||||
where: { applicationId: id, isPRMRSecret: false },
|
||||
orderBy: { createdAt: 'asc' }
|
||||
});
|
||||
let previewSecrets = await prisma.secret.findMany({
|
||||
where: { applicationId: id, isPRMRSecret: true },
|
||||
orderBy: { createdAt: 'asc' }
|
||||
});
|
||||
|
||||
secrets = secrets.map((secret) => {
|
||||
secret.value = decrypt(secret.value);
|
||||
return secret;
|
||||
});
|
||||
secrets = secrets.filter((secret) => !secret.isPRMRSecret).sort((a, b) => {
|
||||
return ('' + a.name).localeCompare(b.name);
|
||||
})
|
||||
previewSecrets = previewSecrets.map((secret) => {
|
||||
secret.value = decrypt(secret.value);
|
||||
return secret;
|
||||
});
|
||||
|
||||
return {
|
||||
secrets
|
||||
previewSecrets: previewSecrets.sort((a, b) => {
|
||||
return ('' + a.name).localeCompare(b.name);
|
||||
}),
|
||||
secrets: secrets.sort((a, b) => {
|
||||
return ('' + a.name).localeCompare(b.name);
|
||||
})
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
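
// getSecrets now returns regular secrets and preview (PR/MR) secrets separately, each sorted by
// name. A short sketch of consuming that response shape from a client; apiBase and the mount path
// are assumptions about how the route is exposed.
async function fetchApplicationSecrets(apiBase: string, applicationId: string) {
    const response = await fetch(`${apiBase}/applications/${applicationId}/secrets`);
    const { secrets, previewSecrets } = await response.json();
    return { secrets, previewSecrets };
}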
|
||||
|
||||
export async function updatePreviewSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { name, value } = request.body
|
||||
if (value) {
|
||||
value = encrypt(value.trim())
|
||||
} else {
|
||||
value = ''
|
||||
}
|
||||
await prisma.secret.updateMany({
|
||||
where: { applicationId: id, name, isPRMRSecret: true },
|
||||
data: { value }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function updateSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { name, value, isBuildSecret = undefined } = request.body
|
||||
await prisma.secret.updateMany({
|
||||
where: { applicationId: id, name },
|
||||
data: { value: encrypt(value.trim()), isBuildSecret }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function saveSecret(request: FastifyRequest<SaveSecret>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { name, value, isBuildSecret, isPRMRSecret, isNew } = request.body
|
||||
|
||||
if (isNew) {
|
||||
const found = await prisma.secret.findFirst({ where: { name, applicationId: id, isPRMRSecret } });
|
||||
if (found) {
|
||||
throw { status: 500, message: `Secret ${name} already exists.` }
|
||||
} else {
|
||||
value = encrypt(value.trim());
|
||||
await prisma.secret.create({
|
||||
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
value = encrypt(value.trim());
|
||||
const found = await prisma.secret.findFirst({ where: { applicationId: id, name, isPRMRSecret } });
|
||||
|
||||
if (found) {
|
||||
await prisma.secret.updateMany({
|
||||
where: { applicationId: id, name, isPRMRSecret },
|
||||
data: { value, isBuildSecret, isPRMRSecret }
|
||||
});
|
||||
} else {
|
||||
await prisma.secret.create({
|
||||
data: { name, value, isBuildSecret, isPRMRSecret, application: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
const { name, value, isBuildSecret = false } = request.body
|
||||
await prisma.secret.create({
|
||||
data: { name, value: encrypt(value.trim()), isBuildSecret, isPRMRSecret: false, application: { connect: { id } } }
|
||||
});
|
||||
await prisma.secret.create({
|
||||
data: { name, value: encrypt(value.trim()), isBuildSecret, isPRMRSecret: true, application: { connect: { id } } }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
@@ -725,6 +954,181 @@ export async function deleteStorage(request: FastifyRequest<DeleteStorage>) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function restartPreview(request: FastifyRequest<RestartPreviewApplication>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id, pullmergeRequestId } = request.params
|
||||
const { teamId } = request.user
|
||||
let application: any = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId) {
|
||||
const buildId = cuid();
|
||||
const { id: dockerId, network } = application.destinationDocker;
|
||||
const { secrets, port, repository, persistentStorage, id: applicationId, buildPack, exposePort } = application;
|
||||
|
||||
const envs = [
|
||||
`PORT=${port}`
|
||||
];
|
||||
if (secrets.length > 0) {
|
||||
secrets.forEach((secret) => {
|
||||
if (pullmergeRequestId) {
|
||||
const isSecretFound = secrets.filter(s => s.name === secret.name && s.isPRMRSecret)
|
||||
if (isSecretFound.length > 0) {
|
||||
envs.push(`${secret.name}=${isSecretFound[0].value}`);
|
||||
} else {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
} else {
|
||||
if (!secret.isPRMRSecret) {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
const { workdir } = await createDirectories({ repository, buildId });
|
||||
const labels = []
|
||||
let image = null
|
||||
const { stdout: container } = await executeDockerCmd({ dockerId, command: `docker container ls --filter 'label=com.docker.compose.service=${id}-${pullmergeRequestId}' --format '{{json .}}'` })
|
||||
const containersArray = container.trim().split('\n');
|
||||
for (const container of containersArray) {
|
||||
const containerObj = formatLabelsOnDocker(container);
|
||||
image = containerObj[0].Image
|
||||
Object.keys(containerObj[0].Labels).forEach(function (key) {
|
||||
if (key.startsWith('coolify')) {
|
||||
labels.push(`${key}=${containerObj[0].Labels[key]}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
let imageFound = false;
|
||||
try {
|
||||
await executeDockerCmd({
|
||||
dockerId,
|
||||
command: `docker image inspect ${image}`
|
||||
})
|
||||
imageFound = true;
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
if (!imageFound) {
|
||||
throw { status: 500, message: 'Image not found, cannot restart application.' }
|
||||
}
|
||||
await fs.writeFile(`${workdir}/.env`, envs.join('\n'));
|
||||
|
||||
let envFound = false;
|
||||
try {
|
||||
envFound = !!(await fs.stat(`${workdir}/.env`));
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
const volumes =
|
||||
persistentStorage?.map((storage) => {
|
||||
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
|
||||
}${storage.path}`;
|
||||
}) || [];
|
||||
const composeVolumes = volumes.map((volume) => {
|
||||
return {
|
||||
[`${volume.split(':')[0]}`]: {
|
||||
name: volume.split(':')[0]
|
||||
}
|
||||
};
|
||||
});
|
||||
const composeFile = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[`${applicationId}-${pullmergeRequestId}`]: {
|
||||
image,
|
||||
container_name: `${applicationId}-${pullmergeRequestId}`,
|
||||
volumes,
|
||||
env_file: envFound ? [`${workdir}/.env`] : [],
|
||||
labels,
|
||||
depends_on: [],
|
||||
expose: [port],
|
||||
...(exposePort ? { ports: [`${exposePort}:${port}`] } : {}),
|
||||
...defaultComposeConfiguration(network),
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[network]: {
|
||||
external: true
|
||||
}
|
||||
},
|
||||
volumes: Object.assign({}, ...composeVolumes)
|
||||
};
|
||||
await fs.writeFile(`${workdir}/docker-compose.yml`, yaml.dump(composeFile));
|
||||
await executeDockerCmd({ dockerId, command: `docker stop -t 0 ${id}-${pullmergeRequestId}` })
|
||||
await executeDockerCmd({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` })
|
||||
await executeDockerCmd({ dockerId, command: `docker compose --project-directory ${workdir} up -d` })
|
||||
return reply.code(201).send();
|
||||
}
|
||||
throw { status: 500, message: 'Application cannot be restarted.' }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getPreviewStatus(request: FastifyRequest<RestartPreviewApplication>) {
|
||||
try {
|
||||
const { id, pullmergeRequestId } = request.params
|
||||
const { teamId } = request.user
|
||||
let isRunning = false;
|
||||
let isExited = false;
|
||||
let isRestarting = false;
|
||||
let isBuilding = false
|
||||
const application: any = await getApplicationFromDB(id, teamId);
|
||||
if (application?.destinationDockerId) {
|
||||
const status = await checkContainer({ dockerId: application.destinationDocker.id, container: `${id}-${pullmergeRequestId}` });
|
||||
if (status?.found) {
|
||||
isRunning = status.status.isRunning;
|
||||
isExited = status.status.isExited;
|
||||
isRestarting = status.status.isRestarting
|
||||
}
|
||||
const building = await prisma.build.findMany({ where: { applicationId: id, pullmergeRequestId, status: { in: ['queued', 'running'] } } })
|
||||
isBuilding = building.length > 0
|
||||
}
|
||||
return {
|
||||
isBuilding,
|
||||
isRunning,
|
||||
isRestarting,
|
||||
isExited,
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
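
// getPreviewStatus combines container state with any queued or running builds for the pull
// request. A minimal polling sketch against the matching route; apiBase and the mount path are
// assumptions, while the `/previews/:pullmergeRequestId/status` segment comes from the router
// registrations later in this diff.
async function pollPreviewStatus(apiBase: string, id: string, pullmergeRequestId: string) {
    const res = await fetch(`${apiBase}/applications/${id}/previews/${pullmergeRequestId}/status`);
    const { isBuilding, isRunning, isRestarting, isExited } = await res.json();
    if (isBuilding) return 'building';
    if (isRunning) return 'running';
    if (isRestarting) return 'restarting';
    return isExited ? 'exited' : 'stopped';
}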
|
||||
export async function loadPreviews(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
|
||||
const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
|
||||
if (stdout === '') {
|
||||
throw { status: 500, message: 'No previews found.' }
|
||||
}
|
||||
const containers = formatLabelsOnDocker(stdout).filter(container => container.Labels['coolify.configuration'] && container.Labels['coolify.type'] === 'standalone-application')
|
||||
|
||||
const jsonContainers = containers
|
||||
.map((container) =>
|
||||
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
|
||||
)
|
||||
.filter((container) => {
|
||||
return container.pullmergeRequestId && container.applicationId === id;
|
||||
});
|
||||
for (const container of jsonContainers) {
|
||||
const found = await prisma.previewApplication.findMany({ where: { applicationId: container.applicationId, pullmergeRequestId: container.pullmergeRequestId } })
|
||||
if (found.length === 0) {
|
||||
await prisma.previewApplication.create({
|
||||
data: {
|
||||
pullmergeRequestId: container.pullmergeRequestId,
|
||||
sourceBranch: container.branch,
|
||||
customDomain: container.fqdn,
|
||||
application: { connect: { id: container.applicationId } }
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
return {
|
||||
previews: await prisma.previewApplication.findMany({ where: { applicationId: id } })
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
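
// loadPreviews reads the `coolify.configuration` Docker label, which carries a base64-encoded JSON
// payload. A sketch of the decode used above plus its likely encode counterpart (the encode side
// is an assumption about how the label is produced elsewhere in the codebase):
function encodeCoolifyConfiguration(configuration: Record<string, unknown>): string {
    return Buffer.from(JSON.stringify(configuration)).toString('base64');
}
function decodeCoolifyConfiguration(label: string): Record<string, unknown> {
    return JSON.parse(Buffer.from(label, 'base64').toString());
}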
|
||||
export async function getPreviews(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
@@ -740,26 +1144,7 @@ export async function getPreviews(request: FastifyRequest<OnlyId>) {
|
||||
|
||||
const applicationSecrets = secrets.filter((secret) => !secret.isPRMRSecret);
|
||||
const PRMRSecrets = secrets.filter((secret) => secret.isPRMRSecret);
|
||||
const application = await prisma.application.findUnique({ where: { id }, include: { destinationDocker: true } });
|
||||
const { stdout } = await executeDockerCmd({ dockerId: application.destinationDocker.id, command: `docker container ls --filter 'name=${id}-' --format "{{json .}}"` })
|
||||
if (stdout === '') {
|
||||
return {
|
||||
containers: [],
|
||||
applicationSecrets: [],
|
||||
PRMRSecrets: []
|
||||
}
|
||||
}
|
||||
const containers = formatLabelsOnDocker(stdout).filter(container => container.Labels['coolify.configuration'] && container.Labels['coolify.type'] === 'standalone-application')
|
||||
|
||||
const jsonContainers = containers
|
||||
.map((container) =>
|
||||
JSON.parse(Buffer.from(container.Labels['coolify.configuration'], 'base64').toString())
|
||||
)
|
||||
.filter((container) => {
|
||||
return container.pullmergeRequestId && container.applicationId === id;
|
||||
});
|
||||
return {
|
||||
containers: jsonContainers,
|
||||
applicationSecrets: applicationSecrets.sort((a, b) => {
|
||||
return ('' + a.name).localeCompare(b.name);
|
||||
}),
|
||||
@@ -768,7 +1153,6 @@ export async function getPreviews(request: FastifyRequest<OnlyId>) {
|
||||
})
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
console.log({ status, message })
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
@@ -812,7 +1196,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
|
||||
export async function getBuilds(request: FastifyRequest<GetBuilds>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { buildId, skip = 0 } = request.query
|
||||
@@ -829,17 +1213,15 @@ export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
|
||||
builds = await prisma.build.findMany({
|
||||
where: { applicationId: id },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: 5,
|
||||
skip
|
||||
take: 5 + skip
|
||||
});
|
||||
}
|
||||
|
||||
builds = builds.map((build) => {
|
||||
const updatedAt = day(build.updatedAt).utc();
|
||||
build.took = updatedAt.diff(day(build.createdAt)) / 1000;
|
||||
build.since = updatedAt.fromNow();
|
||||
return build;
|
||||
});
|
||||
if (build.status === 'running') {
|
||||
build.elapsed = (day().utc().diff(day(build.createdAt)) / 1000).toFixed(0);
|
||||
}
|
||||
return build
|
||||
})
|
||||
return {
|
||||
builds,
|
||||
buildCount
|
||||
@@ -851,18 +1233,50 @@ export async function getBuildLogs(request: FastifyRequest<GetBuildLogs>) {
|
||||
|
||||
export async function getBuildIdLogs(request: FastifyRequest<GetBuildIdLogs>) {
|
||||
try {
|
||||
const { buildId } = request.params
|
||||
// TODO: Fluentbit could still hold the logs, so we need to check if the logs are done
|
||||
const { buildId, id } = request.params
|
||||
let { sequence = 0 } = request.query
|
||||
if (typeof sequence !== 'number') {
|
||||
sequence = Number(sequence)
|
||||
}
|
||||
let logs = await prisma.buildLog.findMany({
|
||||
where: { buildId, time: { gt: sequence } },
|
||||
orderBy: { time: 'asc' }
|
||||
});
|
||||
let file = `/app/logs/${id}_buildlog_${buildId}.csv`
|
||||
if (isDev) {
|
||||
file = `${process.cwd()}/../../logs/${id}_buildlog_${buildId}.csv`
|
||||
}
|
||||
const data = await prisma.build.findFirst({ where: { id: buildId } });
|
||||
const createdAt = day(data.createdAt).utc();
|
||||
try {
|
||||
await fs.stat(file)
|
||||
} catch (error) {
|
||||
let logs = await prisma.buildLog.findMany({
|
||||
where: { buildId, time: { gt: sequence } },
|
||||
orderBy: { time: 'asc' }
|
||||
});
|
||||
const data = await prisma.build.findFirst({ where: { id: buildId } });
|
||||
const createdAt = day(data.createdAt).utc();
|
||||
return {
|
||||
logs: logs.map(log => {
|
||||
log.time = Number(log.time)
|
||||
return log
|
||||
}),
|
||||
fromDb: true,
|
||||
took: day().diff(createdAt) / 1000,
|
||||
status: data?.status || 'queued'
|
||||
}
|
||||
}
|
||||
let fileLogs = (await fs.readFile(file)).toString()
|
||||
let decryptedLogs = await csv({ noheader: true }).fromString(fileLogs)
|
||||
let logs = decryptedLogs.map(log => {
|
||||
const parsed = {
|
||||
time: log['field1'],
|
||||
line: decrypt(log['field2'] + '","' + log['field3'])
|
||||
}
|
||||
return parsed
|
||||
}).filter(log => log.time > sequence)
|
||||
return {
|
||||
logs,
|
||||
fromDb: false,
|
||||
took: day().diff(createdAt) / 1000,
|
||||
status: data?.status || 'queued'
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
@@ -938,3 +1352,58 @@ export async function cancelDeployment(request: FastifyRequest<CancelDeployment>
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export async function createdBranchDatabase(database: any, baseDatabaseBranch: string, pullmergeRequestId: string) {
|
||||
try {
|
||||
if (!baseDatabaseBranch) return
|
||||
const { id, type, destinationDockerId, rootUser, rootUserPassword, dbUser } = database;
|
||||
if (destinationDockerId) {
|
||||
if (type === 'postgresql') {
|
||||
const decryptedRootUserPassword = decrypt(rootUserPassword);
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker exec ${id} pg_dump -d "postgresql://postgres:${decryptedRootUserPassword}@${id}:5432/${baseDatabaseBranch}" --encoding=UTF8 --schema-only -f /tmp/${baseDatabaseBranch}.dump`
|
||||
})
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "CREATE DATABASE branch_${pullmergeRequestId}"`
|
||||
})
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker exec ${id} psql -d "postgresql://postgres:${decryptedRootUserPassword}@${id}:5432/branch_${pullmergeRequestId}" -f /tmp/${baseDatabaseBranch}.dump`
|
||||
})
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "ALTER DATABASE branch_${pullmergeRequestId} OWNER TO ${dbUser}"`
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
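
// createdBranchDatabase and removeBranchDatabase implement database branching for PostgreSQL:
// dump the base database's schema, create branch_<PR>, restore the schema into it and hand
// ownership to the application user; the branch is dropped when the preview goes away. A hedged
// sketch of wiring them to a pull-request lifecycle (onPullRequestEvent is illustrative):
async function onPullRequestEvent(database: any, baseDatabaseBranch: string, pullmergeRequestId: string, action: 'opened' | 'closed') {
    if (action === 'opened') {
        await createdBranchDatabase(database, baseDatabaseBranch, pullmergeRequestId);
    } else {
        await removeBranchDatabase(database, pullmergeRequestId);
    }
}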
|
||||
export async function removeBranchDatabase(database: any, pullmergeRequestId: string) {
|
||||
try {
|
||||
const { id, type, destinationDockerId, rootUser, rootUserPassword } = database;
|
||||
if (destinationDockerId) {
|
||||
if (type === 'postgresql') {
|
||||
const decryptedRootUserPassword = decrypt(rootUserPassword);
|
||||
// Terminate all connections to the database
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE pg_stat_activity.datname = 'branch_${pullmergeRequestId}' AND pid <> pg_backend_pid();"`
|
||||
})
|
||||
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDockerId,
|
||||
command: `docker exec ${id} psql postgresql://postgres:${decryptedRootUserPassword}@${id}:5432 -c "DROP DATABASE branch_${pullmergeRequestId}"`
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
import { FastifyPluginAsync } from 'fastify';
|
||||
import { OnlyId } from '../../../../types';
|
||||
import { cancelDeployment, checkDNS, checkDomain, checkRepository, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildLogs, getBuildPack, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getSecrets, getStorages, getUsage, listApplications, newApplication, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication } from './handlers';
|
||||
import { cancelDeployment, checkDNS, checkDomain, checkRepository, cleanupUnconfiguredApplications, deleteApplication, deleteSecret, deleteStorage, deployApplication, getApplication, getApplicationLogs, getApplicationStatus, getBuildIdLogs, getBuildPack, getBuilds, getGitHubToken, getGitLabSSHKey, getImages, getPreviews, getPreviewStatus, getSecrets, getStorages, getUsage, listApplications, loadPreviews, newApplication, restartApplication, restartPreview, saveApplication, saveApplicationSettings, saveApplicationSource, saveBuildPack, saveConnectedDatabase, saveDeployKey, saveDestination, saveGitLabSSHKey, saveRepository, saveSecret, saveStorage, stopApplication, stopPreviewApplication, updatePreviewSecret, updateSecret } from './handlers';
|
||||
|
||||
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuildLogs, GetImages, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
|
||||
import type { CancelDeployment, CheckDNS, CheckDomain, CheckRepository, DeleteApplication, DeleteSecret, DeleteStorage, DeployApplication, GetApplicationLogs, GetBuildIdLogs, GetBuilds, GetImages, RestartPreviewApplication, SaveApplication, SaveApplicationSettings, SaveApplicationSource, SaveDeployKey, SaveDestination, SaveSecret, SaveStorage, StopPreviewApplication } from './types';
|
||||
|
||||
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.addHook('onRequest', async (request) => {
|
||||
@@ -11,6 +11,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.get('/', async (request) => await listApplications(request));
|
||||
fastify.post<GetImages>('/images', async (request) => await getImages(request));
|
||||
|
||||
fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredApplications(request));
|
||||
|
||||
fastify.post('/new', async (request, reply) => await newApplication(request, reply));
|
||||
|
||||
fastify.get<OnlyId>('/:id', async (request) => await getApplication(request));
|
||||
@@ -19,6 +21,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
|
||||
fastify.get<OnlyId>('/:id/status', async (request) => await getApplicationStatus(request));
|
||||
|
||||
fastify.post<OnlyId>('/:id/restart', async (request, reply) => await restartApplication(request, reply));
|
||||
fastify.post<OnlyId>('/:id/stop', async (request, reply) => await stopApplication(request, reply));
|
||||
fastify.post<StopPreviewApplication>('/:id/stop/preview', async (request, reply) => await stopPreviewApplication(request, reply));
|
||||
|
||||
@@ -29,6 +32,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
|
||||
fastify.get<OnlyId>('/:id/secrets', async (request) => await getSecrets(request));
|
||||
fastify.post<SaveSecret>('/:id/secrets', async (request, reply) => await saveSecret(request, reply));
|
||||
fastify.put<SaveSecret>('/:id/secrets', async (request, reply) => await updateSecret(request, reply));
|
||||
fastify.put<SaveSecret>('/:id/secrets/preview', async (request, reply) => await updatePreviewSecret(request, reply));
|
||||
fastify.delete<DeleteSecret>('/:id/secrets', async (request) => await deleteSecret(request));
|
||||
|
||||
fastify.get<OnlyId>('/:id/storages', async (request) => await getStorages(request));
|
||||
@@ -36,9 +41,12 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.delete<DeleteStorage>('/:id/storages', async (request) => await deleteStorage(request));
|
||||
|
||||
fastify.get<OnlyId>('/:id/previews', async (request) => await getPreviews(request));
|
||||
fastify.post<OnlyId>('/:id/previews/load', async (request) => await loadPreviews(request));
|
||||
fastify.get<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/status', async (request) => await getPreviewStatus(request));
|
||||
fastify.post<RestartPreviewApplication>('/:id/previews/:pullmergeRequestId/restart', async (request, reply) => await restartPreview(request, reply));
|
||||
|
||||
fastify.get<GetApplicationLogs>('/:id/logs', async (request) => await getApplicationLogs(request));
|
||||
fastify.get<GetBuildLogs>('/:id/logs/build', async (request) => await getBuildLogs(request));
|
||||
fastify.get<GetBuilds>('/:id/logs/build', async (request) => await getBuilds(request));
|
||||
fastify.get<GetBuildIdLogs>('/:id/logs/build/:buildId', async (request) => await getBuildIdLogs(request));
|
||||
|
||||
fastify.get('/:id/usage', async (request) => await getUsage(request))
|
||||
@@ -54,6 +62,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.get('/:id/configuration/buildpack', async (request) => await getBuildPack(request));
|
||||
fastify.post('/:id/configuration/buildpack', async (request, reply) => await saveBuildPack(request, reply));
|
||||
|
||||
fastify.post('/:id/configuration/database', async (request, reply) => await saveConnectedDatabase(request, reply));
|
||||
|
||||
fastify.get<OnlyId>('/:id/configuration/sshkey', async (request) => await getGitLabSSHKey(request));
|
||||
fastify.post<OnlyId>('/:id/configuration/sshkey', async (request, reply) => await saveGitLabSSHKey(request, reply));
|
||||
|
||||
|
||||
@@ -20,15 +20,17 @@ export interface SaveApplication extends OnlyId {
|
||||
denoOptions: string,
|
||||
baseImage: string,
|
||||
baseBuildImage: string,
|
||||
deploymentType: string
|
||||
deploymentType: string,
|
||||
baseDatabaseBranch: string
|
||||
}
|
||||
}
|
||||
export interface SaveApplicationSettings extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; };
|
||||
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean, isCustomSSL: boolean };
|
||||
}
|
||||
export interface DeleteApplication extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
Body: { force: boolean }
|
||||
}
|
||||
export interface CheckDomain extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
@@ -63,7 +65,7 @@ export interface SaveSecret extends OnlyId {
|
||||
name: string,
|
||||
value: string,
|
||||
isBuildSecret: boolean,
|
||||
isPRMRSecret: boolean,
|
||||
previewSecret: boolean,
|
||||
isNew: boolean
|
||||
}
|
||||
}
|
||||
@@ -87,7 +89,7 @@ export interface GetApplicationLogs extends OnlyId {
|
||||
since: number,
|
||||
}
|
||||
}
|
||||
export interface GetBuildLogs extends OnlyId {
|
||||
export interface GetBuilds extends OnlyId {
|
||||
Querystring: {
|
||||
buildId: string
|
||||
skip: number,
|
||||
@@ -95,6 +97,7 @@ export interface GetBuildLogs extends OnlyId {
|
||||
}
|
||||
export interface GetBuildIdLogs {
|
||||
Params: {
|
||||
id: string,
|
||||
buildId: string
|
||||
},
|
||||
Querystring: {
|
||||
@@ -124,4 +127,10 @@ export interface StopPreviewApplication extends OnlyId {
|
||||
Body: {
|
||||
pullmergeRequestId: string | null,
|
||||
}
|
||||
}
|
||||
export interface RestartPreviewApplication {
|
||||
Params: {
|
||||
id: string,
|
||||
pullmergeRequestId: string | null,
|
||||
}
|
||||
}
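
// RestartPreviewApplication types the preview routes registered above in the application router
// ('/:id/previews/:pullmergeRequestId/status' and '/:id/previews/:pullmergeRequestId/restart').
// A short client-side sketch of triggering a preview restart; apiBase and the mount path are
// assumptions about how the router is exposed.
async function restartPreviewDeployment(apiBase: string, id: string, pullmergeRequestId: string) {
    await fetch(`${apiBase}/applications/${id}/previews/${pullmergeRequestId}/restart`, { method: 'POST' });
}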
|
||||
@@ -11,6 +11,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
version,
|
||||
whiteLabeled: process.env.COOLIFY_WHITE_LABELED === 'true',
|
||||
whiteLabeledIcon: process.env.COOLIFY_WHITE_LABELED_ICON,
|
||||
isRegistrationEnabled: settings.isRegistrationEnabled,
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
|
||||
@@ -3,11 +3,11 @@ import type { FastifyRequest } from 'fastify';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import yaml from 'js-yaml';
|
||||
import fs from 'fs/promises';
|
||||
import { ComposeFile, createDirectories, decrypt, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
|
||||
import { ComposeFile, createDirectories, decrypt, defaultComposeConfiguration, encrypt, errorHandler, executeDockerCmd, generateDatabaseConfiguration, generatePassword, getContainerUsage, getDatabaseImage, getDatabaseVersions, getFreePublicPort, listSettings, makeLabelForStandaloneDatabase, prisma, startTraefikTCPProxy, stopDatabaseContainer, stopTcpHttpProxy, supportedDatabaseTypesAndVersions, uniqueName, updatePasswordInDb } from '../../../../lib/common';
|
||||
import { day } from '../../../../lib/dayjs';
|
||||
|
||||
import { GetDatabaseLogs, OnlyId, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSettings, SaveVersion } from '../../../../types';
|
||||
import { SaveDatabaseType } from './types';
|
||||
import type { OnlyId } from '../../../../types';
|
||||
import type { DeleteDatabase, DeleteDatabaseSecret, GetDatabaseLogs, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSecret, SaveDatabaseSettings, SaveDatabaseType, SaveVersion } from './types';
|
||||
|
||||
export async function listDatabases(request: FastifyRequest) {
|
||||
try {
|
||||
@@ -51,6 +51,30 @@ export async function newDatabase(request: FastifyRequest, reply: FastifyReply)
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function cleanupUnconfiguredDatabases(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
let databases = await prisma.database.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true },
|
||||
});
|
||||
for (const database of databases) {
|
||||
if (!database?.version) {
|
||||
const { id } = database;
|
||||
if (database.destinationDockerId) {
|
||||
const everStarted = await stopDatabaseContainer(database);
|
||||
if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
|
||||
}
|
||||
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
|
||||
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
|
||||
await prisma.database.delete({ where: { id } });
|
||||
}
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
@@ -61,16 +85,18 @@ export async function getDatabaseStatus(request: FastifyRequest<OnlyId>) {
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
});
|
||||
const { destinationDockerId, destinationDocker } = database;
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
|
||||
if (database) {
|
||||
const { destinationDockerId, destinationDocker } = database;
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
const { stdout } = await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker inspect --format '{{json .State}}' ${id}` })
|
||||
|
||||
if (JSON.parse(stdout).Running) {
|
||||
isRunning = true;
|
||||
if (JSON.parse(stdout).Running) {
|
||||
isRunning = true;
|
||||
}
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
}
|
||||
return {
|
||||
@@ -92,15 +118,14 @@ export async function getDatabase(request: FastifyRequest<OnlyId>) {
|
||||
if (!database) {
|
||||
throw { status: 404, message: 'Database not found.' }
|
||||
}
|
||||
const { arch } = await listSettings();
|
||||
const settings = await listSettings();
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
|
||||
const configuration = generateDatabaseConfiguration(database, arch);
|
||||
const settings = await listSettings();
|
||||
const configuration = generateDatabaseConfiguration(database, settings.arch);
|
||||
return {
|
||||
privatePort: configuration?.privatePort,
|
||||
database,
|
||||
versions: await getDatabaseVersions(database.type, arch),
|
||||
versions: await getDatabaseVersions(database.type, settings.arch),
|
||||
settings
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
@@ -167,6 +192,7 @@ export async function saveDatabaseDestination(request: FastifyRequest<SaveDataba
|
||||
const { id } = request.params;
|
||||
const { destinationId } = request.body;
|
||||
|
||||
const { arch } = await listSettings();
|
||||
await prisma.database.update({
|
||||
where: { id },
|
||||
data: { destinationDocker: { connect: { id: destinationId } } }
|
||||
@@ -181,7 +207,7 @@ export async function saveDatabaseDestination(request: FastifyRequest<SaveDataba
|
||||
|
||||
if (destinationDockerId) {
|
||||
if (type && version) {
|
||||
const baseImage = getDatabaseImage(type);
|
||||
const baseImage = getDatabaseImage(type, arch);
|
||||
executeDockerCmd({ dockerId, command: `docker pull ${baseImage}:${version}` })
|
||||
}
|
||||
}
|
||||
@@ -219,7 +245,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
|
||||
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, settings: true }
|
||||
include: { destinationDocker: true, settings: true, databaseSecret: true }
|
||||
});
|
||||
const { arch } = await listSettings();
|
||||
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
|
||||
@@ -229,7 +255,8 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
|
||||
destinationDockerId,
|
||||
destinationDocker,
|
||||
publicPort,
|
||||
settings: { isPublic }
|
||||
settings: { isPublic },
|
||||
databaseSecret
|
||||
} = database;
|
||||
const { privatePort, command, environmentVariables, image, volume, ulimits } =
|
||||
generateDatabaseConfiguration(database, arch);
|
||||
@@ -239,7 +266,11 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
|
||||
const labels = await makeLabelForStandaloneDatabase({ id, image, volume });
|
||||
|
||||
const { workdir } = await createDirectories({ repository: type, buildId: id });
|
||||
|
||||
if (databaseSecret.length > 0) {
|
||||
databaseSecret.forEach((secret) => {
|
||||
environmentVariables[secret.name] = decrypt(secret.value);
|
||||
});
|
||||
}
|
||||
const composeFile: ComposeFile = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
@@ -247,20 +278,11 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
|
||||
container_name: id,
|
||||
image,
|
||||
command,
|
||||
networks: [network],
|
||||
environment: environmentVariables,
|
||||
volumes: [volume],
|
||||
ulimits,
|
||||
labels,
|
||||
restart: 'always',
|
||||
deploy: {
|
||||
restart_policy: {
|
||||
condition: 'on-failure',
|
||||
delay: '5s',
|
||||
max_attempts: 3,
|
||||
window: '120s'
|
||||
}
|
||||
}
|
||||
...defaultComposeConfiguration(network),
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
@@ -270,28 +292,16 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
|
||||
},
|
||||
volumes: {
|
||||
[volumeName]: {
|
||||
external: true
|
||||
name: volumeName,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const composeFileDestination = `${workdir}/docker-compose.yaml`;
|
||||
await fs.writeFile(composeFileDestination, yaml.dump(composeFile));
|
||||
try {
|
||||
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker volume create ${volumeName}` })
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
try {
|
||||
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
|
||||
if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
|
||||
return {};
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
throw {
|
||||
error
|
||||
};
|
||||
}
|
||||
await executeDockerCmd({ dockerId: destinationDocker.id, command: `docker compose -f ${composeFileDestination} up -d` })
|
||||
if (isPublic) await startTraefikTCPProxy(destinationDocker, id, publicPort, privatePort);
|
||||
return {};
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
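The compose hunk above drops the inline restart and deploy policy in favour of spreading defaultComposeConfiguration(network), which the updated import pulls from lib/common. The helper's body is not part of this diff, so the sketch below only shows the shape it would need to return to be a drop-in for the removed block; treat it as an inferred assumption, not the actual implementation.

    // Assumed return shape of defaultComposeConfiguration, inferred from the removed
    // restart/deploy block and the network argument; the real helper may set more keys.
    function defaultComposeConfiguration(network: string) {
        return {
            networks: [network],
            restart: 'always',
            deploy: {
                restart_policy: {
                    condition: 'on-failure',
                    delay: '5s',
                    max_attempts: 3,
                    window: '120s'
                }
            }
        };
    }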
@@ -360,21 +370,25 @@ export async function getDatabaseLogs(request: FastifyRequest<GetDatabaseLogs>)
        return errorHandler({ status, message })
    }
}
export async function deleteDatabase(request: FastifyRequest<OnlyId>) {
export async function deleteDatabase(request: FastifyRequest<DeleteDatabase>) {
    try {
        const teamId = request.user.teamId;
        const { id } = request.params;
        const { force } = request.body;
        const database = await prisma.database.findFirst({
            where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
            include: { destinationDocker: true, settings: true }
        });
        if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
        if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
        if (database.destinationDockerId) {
            const everStarted = await stopDatabaseContainer(database);
            if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
        if (!force) {
            if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
            if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword);
            if (database.destinationDockerId) {
                const everStarted = await stopDatabaseContainer(database);
                if (everStarted) await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
            }
        }
        await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
        await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
        await prisma.database.delete({ where: { id } });
        return {}
    } catch ({ status, message }) {
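deleteDatabase now reads a force flag from the request body; when it is set, the handler skips decrypting credentials and stopping the container and its TCP proxy, and goes straight to deleting the records. A hedged caller sketch follows; the URL prefix and auth are assumptions, and since the DeleteDatabase type added later in this diff declares force as a string, the flag is sent in stringified form here.

    // Illustrative only: apiBase and token are assumed; the DELETE /:id route and
    // the force flag come from the hunks in this diff.
    async function forceDeleteDatabase(apiBase: string, token: string, id: string): Promise<void> {
        await fetch(`${apiBase}/api/v1/databases/${id}`, {
            method: 'DELETE',
            headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' },
            body: JSON.stringify({ force: 'true' }) // typed as string in DeleteDatabase
        });
    }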
@@ -435,10 +449,10 @@ export async function saveDatabaseSettings(request: FastifyRequest<SaveDatabaseS
        let publicPort = null

        const { destinationDocker: { id: dockerId } } = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } })
        const { destinationDocker: { remoteEngine, engine, remoteIpAddress } } = await prisma.database.findUnique({ where: { id }, include: { destinationDocker: true } })

        if (isPublic) {
            publicPort = await getFreePublicPort(id, dockerId);
            publicPort = await getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress });
        }
        await prisma.database.update({
            where: { id },
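The call above switches getFreePublicPort from positional arguments (id, dockerId) to a single options object carrying the engine details, matching the other call site changed later in this diff (activateWordpressFtp). Only the call sites are visible here, so the parameter type below is an inferred sketch rather than the definition from lib/common.

    // Inferred options shape for getFreePublicPort, based solely on the call sites in this diff.
    interface FreePublicPortOptions {
        id: string;
        engine?: string;          // local docker engine socket, e.g. '/var/run/docker.sock'
        remoteEngine?: boolean;   // whether the destination is a remote docker host
        remoteIpAddress?: string; // used when remoteEngine is set
    }
    declare function getFreePublicPort(options: FreePublicPortOptions): Promise<number>;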
@@ -470,4 +484,69 @@ export async function saveDatabaseSettings(request: FastifyRequest<SaveDatabaseS
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
}
|
||||
export async function getDatabaseSecrets(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let secrets = await prisma.databaseSecret.findMany({
|
||||
where: { databaseId: id },
|
||||
orderBy: { createdAt: 'desc' }
|
||||
});
|
||||
secrets = secrets.map((secret) => {
|
||||
secret.value = decrypt(secret.value);
|
||||
return secret;
|
||||
});
|
||||
|
||||
return {
|
||||
secrets
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveDatabaseSecret(request: FastifyRequest<SaveDatabaseSecret>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { name, value, isNew } = request.body
|
||||
|
||||
if (isNew) {
|
||||
const found = await prisma.databaseSecret.findFirst({ where: { name, databaseId: id } });
|
||||
if (found) {
|
||||
throw `Secret ${name} already exists.`
|
||||
} else {
|
||||
value = encrypt(value.trim());
|
||||
await prisma.databaseSecret.create({
|
||||
data: { name, value, database: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
value = encrypt(value.trim());
|
||||
const found = await prisma.databaseSecret.findFirst({ where: { databaseId: id, name } });
|
||||
|
||||
if (found) {
|
||||
await prisma.databaseSecret.updateMany({
|
||||
where: { databaseId: id, name },
|
||||
data: { value }
|
||||
});
|
||||
} else {
|
||||
await prisma.databaseSecret.create({
|
||||
data: { name, value, database: { connect: { id } } }
|
||||
});
|
||||
}
|
||||
}
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deleteDatabaseSecret(request: FastifyRequest<DeleteDatabaseSecret>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { name } = request.body
|
||||
await prisma.databaseSecret.deleteMany({ where: { databaseId: id, name } });
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
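The three secret handlers above share one convention: values are encrypted with encrypt() before they are written through Prisma and decrypted with decrypt() when read back. A condensed sketch of that round trip, assuming the encrypt/decrypt helpers imported at the top of this file are symmetric; the function names and structure are illustrative restatements of saveDatabaseSecret and getDatabaseSecrets.

    // Minimal round-trip sketch; encrypt/decrypt are the lib/common helpers imported above.
    async function upsertSecret(databaseId: string, name: string, plainValue: string) {
        const value = encrypt(plainValue.trim()); // stored encrypted
        const found = await prisma.databaseSecret.findFirst({ where: { databaseId, name } });
        if (found) {
            await prisma.databaseSecret.updateMany({ where: { databaseId, name }, data: { value } });
        } else {
            await prisma.databaseSecret.create({ data: { name, value, database: { connect: { id: databaseId } } } });
        }
    }

    async function readSecrets(databaseId: string) {
        const secrets = await prisma.databaseSecret.findMany({ where: { databaseId } });
        return secrets.map((secret) => ({ ...secret, value: decrypt(secret.value) })); // decrypted on read
    }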
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { FastifyPluginAsync } from 'fastify';
|
||||
import { deleteDatabase, getDatabase, getDatabaseLogs, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
|
||||
import { cleanupUnconfiguredDatabases, deleteDatabase, deleteDatabaseSecret, getDatabase, getDatabaseLogs, getDatabaseSecrets, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSecret, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers';
|
||||
|
||||
import type { GetDatabaseLogs, OnlyId, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSettings, SaveVersion } from '../../../../types';
|
||||
import type { SaveDatabaseType } from './types';
|
||||
import type { OnlyId } from '../../../../types';
|
||||
|
||||
import type { DeleteDatabase, SaveDatabaseType, DeleteDatabaseSecret, GetDatabaseLogs, SaveDatabase, SaveDatabaseDestination, SaveDatabaseSecret, SaveDatabaseSettings, SaveVersion } from './types';
|
||||
|
||||
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.addHook('onRequest', async (request) => {
|
||||
@@ -11,14 +12,20 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.get('/', async (request) => await listDatabases(request));
|
||||
fastify.post('/new', async (request, reply) => await newDatabase(request, reply));
|
||||
|
||||
fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredDatabases(request));
|
||||
|
||||
fastify.get<OnlyId>('/:id', async (request) => await getDatabase(request));
|
||||
fastify.post<SaveDatabase>('/:id', async (request, reply) => await saveDatabase(request, reply));
|
||||
fastify.delete<OnlyId>('/:id', async (request) => await deleteDatabase(request));
|
||||
fastify.delete<DeleteDatabase>('/:id', async (request) => await deleteDatabase(request));
|
||||
|
||||
fastify.get<OnlyId>('/:id/status', async (request) => await getDatabaseStatus(request));
|
||||
|
||||
fastify.post<SaveDatabaseSettings>('/:id/settings', async (request) => await saveDatabaseSettings(request));
|
||||
|
||||
fastify.get<OnlyId>('/:id/secrets', async (request) => await getDatabaseSecrets(request));
|
||||
fastify.post<SaveDatabaseSecret>('/:id/secrets', async (request, reply) => await saveDatabaseSecret(request, reply));
|
||||
fastify.delete<DeleteDatabaseSecret>('/:id/secrets', async (request) => await deleteDatabaseSecret(request));
|
||||
|
||||
fastify.get('/:id/configuration/type', async (request) => await getDatabaseTypes(request));
|
||||
fastify.post<SaveDatabaseType>('/:id/configuration/type', async (request, reply) => await saveDatabaseType(request, reply));
|
||||
|
||||
|
||||
@@ -2,4 +2,54 @@ import type { OnlyId } from "../../../../types";
|
||||
|
||||
export interface SaveDatabaseType extends OnlyId {
|
||||
Body: { type: string }
|
||||
}
|
||||
}
|
||||
export interface DeleteDatabase extends OnlyId {
|
||||
Body: { force: string }
|
||||
}
|
||||
export interface SaveVersion extends OnlyId {
|
||||
Body: {
|
||||
version: string
|
||||
}
|
||||
}
|
||||
export interface SaveDatabaseDestination extends OnlyId {
|
||||
Body: {
|
||||
destinationId: string
|
||||
}
|
||||
}
|
||||
export interface GetDatabaseLogs extends OnlyId {
|
||||
Querystring: {
|
||||
since: number
|
||||
}
|
||||
}
|
||||
export interface SaveDatabase extends OnlyId {
|
||||
Body: {
|
||||
name: string,
|
||||
defaultDatabase: string,
|
||||
dbUser: string,
|
||||
dbUserPassword: string,
|
||||
rootUser: string,
|
||||
rootUserPassword: string,
|
||||
version: string,
|
||||
isRunning: boolean
|
||||
}
|
||||
}
|
||||
export interface SaveDatabaseSettings extends OnlyId {
|
||||
Body: {
|
||||
isPublic: boolean,
|
||||
appendOnly: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface SaveDatabaseSecret extends OnlyId {
|
||||
Body: {
|
||||
name: string,
|
||||
value: string,
|
||||
isNew: string,
|
||||
}
|
||||
}
|
||||
export interface DeleteDatabaseSecret extends OnlyId {
|
||||
Body: {
|
||||
name: string,
|
||||
}
|
||||
}
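These interfaces are what the route file (next hunk) plugs into Fastify's generics, which is how request.params and request.body get static types per route. A small illustration of the pattern; the handler body is a stub, only the generic usage mirrors the registrations that follow.

    // Sketch of how the generics above type a route; the real handlers live in handlers.ts.
    import { FastifyPluginAsync } from 'fastify';

    const example: FastifyPluginAsync = async (fastify) => {
        fastify.delete<DeleteDatabase>('/:id', async (request) => {
            const { id } = request.params;   // typed via OnlyId
            const { force } = request.body;  // typed as string by DeleteDatabase
            return { id, force };
        });
    };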
|
||||
|
||||
|
||||
@@ -30,7 +30,6 @@ export async function listDestinations(request: FastifyRequest<ListDestinations>
|
||||
destinations
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
console.log({ status, message })
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
@@ -114,7 +113,6 @@ export async function newDestination(request: FastifyRequest<NewDestination>, re
|
||||
}
|
||||
|
||||
} catch ({ status, message }) {
|
||||
console.log({ status, message })
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
@@ -162,7 +160,6 @@ export async function startProxy(request: FastifyRequest<Proxy>) {
|
||||
await startTraefikProxy(id);
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
console.log({ status, message })
|
||||
await stopTraefikProxy(id);
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
@@ -205,23 +202,21 @@ export async function assignSSHKey(request: FastifyRequest) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function verifyRemoteDockerEngine(request: FastifyRequest, reply: FastifyReply) {
|
||||
export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
await createRemoteEngineConfiguration(id);
|
||||
|
||||
const { remoteIpAddress, remoteUser, network } = await prisma.destinationDocker.findFirst({ where: { id } })
|
||||
const { remoteIpAddress, remoteUser, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
|
||||
const host = `ssh://${remoteUser}@${remoteIpAddress}`
|
||||
const { stdout } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=${network}' --no-trunc --format "{{json .}}"`);
|
||||
|
||||
if (!stdout) {
|
||||
await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable ${network}`);
|
||||
}
|
||||
const { stdout:coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
|
||||
|
||||
const { stdout: coolifyNetwork } = await asyncExecShell(`DOCKER_HOST=${host} docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`);
|
||||
if (!coolifyNetwork) {
|
||||
await asyncExecShell(`DOCKER_HOST=${host} docker network create --attachable coolify-infra`);
|
||||
}
|
||||
if (isCoolifyProxyUsed) await startTraefikProxy(id);
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
|
||||
return reply.code(201).send()
|
||||
|
||||
@@ -234,7 +229,7 @@ export async function getDestinationStatus(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const destination = await prisma.destinationDocker.findUnique({ where: { id } })
|
||||
const isRunning = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy' })
|
||||
const { found: isRunning } = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
|
||||
return {
|
||||
isRunning
|
||||
}
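checkContainer is now destructured as { found } here and, in the services hunks further down, as an object whose status carries isRunning/isExited/isRestarting; the remove flag is also new. The function is defined in lib/docker and is not part of this diff, so the type below is only what the call sites imply.

    // Return shape inferred from the call sites in this diff; the authoritative type is in lib/docker.
    interface ContainerCheck {
        found: boolean;
        status?: {
            isRunning: boolean;
            isExited: boolean;
            isRestarting: boolean;
        };
    }
    declare function checkContainer(args: { dockerId: string; container: string; remove?: boolean }): Promise<ContainerCheck>;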
|
||||
|
||||
@@ -23,7 +23,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
|
||||
fastify.post('/:id/configuration/sshKey', async (request) => await assignSSHKey(request));
|
||||
|
||||
fastify.post('/:id/verify', async (request, reply) => await verifyRemoteDockerEngine(request, reply));
|
||||
fastify.post<OnlyId>('/:id/verify', async (request, reply) => await verifyRemoteDockerEngine(request, reply));
|
||||
};
|
||||
|
||||
export default root;
|
||||
|
||||
@@ -1,50 +1,64 @@
|
||||
import os from 'node:os';
|
||||
import osu from 'node-os-utils';
|
||||
import axios from 'axios';
|
||||
import { compareVersions } from 'compare-versions';
|
||||
import cuid from 'cuid';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import { asyncExecShell, asyncSleep, cleanupDockerStorage, errorHandler, isDev, listSettings, prisma, uniqueName, version } from '../../../lib/common';
|
||||
import { supportedServiceTypesAndVersions } from '../../../lib/services/supportedVersions';
|
||||
import type { FastifyReply, FastifyRequest } from 'fastify';
|
||||
import type { Login, Update } from '.';
|
||||
import type { GetCurrentUser } from './types';
|
||||
import axios from "axios";
|
||||
import { compareVersions } from "compare-versions";
|
||||
import cuid from "cuid";
|
||||
import bcrypt from "bcryptjs";
|
||||
import {
|
||||
asyncExecShell,
|
||||
asyncSleep,
|
||||
cleanupDockerStorage,
|
||||
errorHandler,
|
||||
isDev,
|
||||
listSettings,
|
||||
prisma,
|
||||
uniqueName,
|
||||
version,
|
||||
} from "../../../lib/common";
|
||||
import { supportedServiceTypesAndVersions } from "../../../lib/services/supportedVersions";
|
||||
import { scheduler } from "../../../lib/scheduler";
|
||||
import type { FastifyReply, FastifyRequest } from "fastify";
|
||||
import type { Login, Update } from ".";
|
||||
import type { GetCurrentUser } from "./types";
|
||||
|
||||
export async function hashPassword(password: string): Promise<string> {
|
||||
const saltRounds = 15;
|
||||
return bcrypt.hash(password, saltRounds);
|
||||
}
|
||||
|
||||
export async function cleanupManually() {
|
||||
export async function cleanupManually(request: FastifyRequest) {
|
||||
try {
|
||||
const destination = await prisma.destinationDocker.findFirst({ where: { engine: '/var/run/docker.sock' } })
|
||||
await cleanupDockerStorage(destination.id, true, true)
|
||||
return {}
|
||||
const { serverId } = request.body;
|
||||
const destination = await prisma.destinationDocker.findUnique({
|
||||
where: { id: serverId },
|
||||
});
|
||||
await cleanupDockerStorage(destination.id, true, true);
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function checkUpdate(request: FastifyRequest) {
|
||||
try {
|
||||
const isStaging = request.hostname === 'staging.coolify.io'
|
||||
const isStaging =
|
||||
request.hostname === "staging.coolify.io" ||
|
||||
request.hostname === "arm.coolify.io";
|
||||
const currentVersion = version;
|
||||
const { data: versions } = await axios.get(
|
||||
`https://get.coollabs.io/versions.json?appId=${process.env['COOLIFY_APP_ID']}&version=${currentVersion}`
|
||||
`https://get.coollabs.io/versions.json?appId=${process.env["COOLIFY_APP_ID"]}&version=${currentVersion}`
|
||||
);
|
||||
const latestVersion = versions['coolify'].main.version
|
||||
const latestVersion = versions["coolify"].main.version;
|
||||
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
|
||||
if (isStaging) {
|
||||
return {
|
||||
isUpdateAvailable: true,
|
||||
latestVersion: 'next'
|
||||
}
|
||||
latestVersion: "next",
|
||||
};
|
||||
}
|
||||
return {
|
||||
isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1,
|
||||
latestVersion
|
||||
latestVersion,
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
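checkUpdate treats an update as available when compareVersions(latestVersion, currentVersion) returns 1, i.e. when the first argument is strictly newer. A tiny reference example of the compare-versions return values (the version strings are illustrative):

    import { compareVersions } from 'compare-versions';

    compareVersions('3.8.1', '3.8.0'); //  1 -> latest is newer, update available
    compareVersions('3.8.0', '3.8.0'); //  0 -> same version
    compareVersions('3.7.9', '3.8.0'); // -1 -> running build is ahead of the published one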
|
||||
|
||||
@@ -52,168 +66,206 @@ export async function update(request: FastifyRequest<Update>) {
|
||||
const { latestVersion } = request.body;
|
||||
try {
|
||||
if (!isDev) {
|
||||
const { isAutoUpdateEnabled } = (await prisma.setting.findFirst()) || {
|
||||
isAutoUpdateEnabled: false
|
||||
};
|
||||
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
|
||||
await asyncExecShell(`docker pull coollabsio/coolify:${latestVersion}`);
|
||||
await asyncExecShell(`env | grep COOLIFY > .env`);
|
||||
await asyncExecShell(
|
||||
`sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
|
||||
);
|
||||
await asyncExecShell(
|
||||
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify && docker rm coolify && docker compose up -d --force-recreate"`
|
||||
`docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
|
||||
);
|
||||
return {};
|
||||
} else {
|
||||
console.log(latestVersion);
|
||||
await asyncSleep(2000);
|
||||
return {};
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function resetQueue(request: FastifyRequest<any>) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
if (teamId === "0") {
|
||||
await prisma.build.updateMany({
|
||||
where: { status: { in: ["queued", "running"] } },
|
||||
data: { status: "canceled" },
|
||||
});
|
||||
scheduler.workers.get("deployApplication").postMessage("cancel");
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
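resetQueue marks queued and running builds as canceled, then posts a "cancel" message to the deployApplication worker through the scheduler. The worker side is not in this diff; the sketch below only shows the standard worker_threads pattern such a worker would use to receive the message, so the handler body is an assumption.

    // Inside a hypothetical deployApplication worker; only the postMessage('cancel')
    // producer side appears in this diff.
    import { parentPort } from 'node:worker_threads';

    parentPort?.on('message', (message) => {
        if (message === 'cancel') {
            // stop the in-flight build loop / tear down the running docker build here
            console.log('Cancel requested for queued and running deployments');
        }
    });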
|
||||
export async function restartCoolify(request: FastifyRequest<any>) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
if (teamId === '0') {
|
||||
if (teamId === "0") {
|
||||
if (!isDev) {
|
||||
await asyncExecShell(`docker restart coolify`);
|
||||
asyncExecShell(`docker restart coolify`);
|
||||
return {};
|
||||
} else {
|
||||
console.log('Restarting Coolify')
|
||||
return {};
|
||||
}
|
||||
}
|
||||
throw { status: 500, message: 'You are not authorized to restart Coolify.' };
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function showUsage() {
|
||||
try {
|
||||
return {
|
||||
usage: {
|
||||
uptime: os.uptime(),
|
||||
memory: await osu.mem.info(),
|
||||
cpu: {
|
||||
load: os.loadavg(),
|
||||
usage: await osu.cpu.usage(),
|
||||
count: os.cpus().length
|
||||
},
|
||||
disk: await osu.drive.info('/')
|
||||
}
|
||||
|
||||
throw {
|
||||
status: 500,
|
||||
message: "You are not authorized to restart Coolify.",
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
|
||||
export async function showDashboard(request: FastifyRequest) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
const applications = await prisma.application.findMany({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { settings: true }
|
||||
let applications = await prisma.application.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true },
|
||||
});
|
||||
const databases = await prisma.database.findMany({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { settings: true }
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true },
|
||||
});
|
||||
const services = await prisma.service.findMany({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } }
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, teams: true },
|
||||
});
|
||||
const gitSources = await prisma.gitSource.findMany({
|
||||
where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
|
||||
include: { teams: true },
|
||||
});
|
||||
const destinations = await prisma.destinationDocker.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { teams: true },
|
||||
});
|
||||
const settings = await listSettings();
|
||||
|
||||
let foundUnconfiguredApplication = false;
|
||||
for (const application of applications) {
|
||||
if (!application.buildPack || !application.destinationDockerId || !application.branch || (!application.settings?.isBot && !application?.fqdn)) {
|
||||
foundUnconfiguredApplication = true
|
||||
}
|
||||
}
|
||||
let foundUnconfiguredService = false;
|
||||
for (const service of services) {
|
||||
if (!service.fqdn) {
|
||||
foundUnconfiguredService = true
|
||||
}
|
||||
}
|
||||
let foundUnconfiguredDatabase = false;
|
||||
for (const database of databases) {
|
||||
if (!database.version) {
|
||||
foundUnconfiguredDatabase = true
|
||||
}
|
||||
}
|
||||
return {
|
||||
foundUnconfiguredApplication,
|
||||
foundUnconfiguredDatabase,
|
||||
foundUnconfiguredService,
|
||||
applications,
|
||||
databases,
|
||||
services,
|
||||
gitSources,
|
||||
destinations,
|
||||
settings,
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
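The three loops above only need to answer whether at least one unconfigured item exists, so they could equally be expressed with Array.prototype.some; a compact equivalent is sketched below, kept as an aside since the diff itself keeps the explicit loops.

    // Equivalent predicates for the flags computed above (sketch, not part of the diff).
    const foundUnconfiguredApplication = applications.some((a) =>
        !a.buildPack || !a.destinationDockerId || !a.branch || (!a.settings?.isBot && !a.fqdn)
    );
    const foundUnconfiguredService = services.some((s) => !s.fqdn);
    const foundUnconfiguredDatabase = databases.some((d) => !d.version);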
|
||||
|
||||
export async function login(request: FastifyRequest<Login>, reply: FastifyReply) {
|
||||
export async function login(
|
||||
request: FastifyRequest<Login>,
|
||||
reply: FastifyReply
|
||||
) {
|
||||
if (request.user) {
|
||||
return reply.redirect('/dashboard');
|
||||
return reply.redirect("/dashboard");
|
||||
} else {
|
||||
const { email, password, isLogin } = request.body || {};
|
||||
if (!email || !password) {
|
||||
throw { status: 500, message: 'Email and password are required.' };
|
||||
throw { status: 500, message: "Email and password are required." };
|
||||
}
|
||||
const users = await prisma.user.count();
|
||||
const userFound = await prisma.user.findUnique({
|
||||
where: { email },
|
||||
include: { teams: true, permission: true },
|
||||
rejectOnNotFound: false
|
||||
rejectOnNotFound: false,
|
||||
});
|
||||
if (!userFound && isLogin) {
|
||||
throw { status: 500, message: 'User not found.' };
|
||||
throw { status: 500, message: "User not found." };
|
||||
}
|
||||
const { isRegistrationEnabled, id } = await prisma.setting.findFirst()
|
||||
const { isRegistrationEnabled, id } = await prisma.setting.findFirst();
|
||||
let uid = cuid();
|
||||
let permission = 'read';
|
||||
let permission = "read";
|
||||
let isAdmin = false;
|
||||
|
||||
if (users === 0) {
|
||||
await prisma.setting.update({ where: { id }, data: { isRegistrationEnabled: false } });
|
||||
uid = '0';
|
||||
await prisma.setting.update({
|
||||
where: { id },
|
||||
data: { isRegistrationEnabled: false },
|
||||
});
|
||||
uid = "0";
|
||||
}
|
||||
if (userFound) {
|
||||
if (userFound.type === 'email') {
|
||||
if (userFound.password === 'RESETME') {
|
||||
if (userFound.type === "email") {
|
||||
if (userFound.password === "RESETME") {
|
||||
const hashedPassword = await hashPassword(password);
|
||||
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
|
||||
if (userFound.id === '0') {
|
||||
if (userFound.id === "0") {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: 'RESETME' }
|
||||
data: { password: "RESETME" },
|
||||
});
|
||||
} else {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: 'RESETTIMEOUT' }
|
||||
data: { password: "RESETTIMEOUT" },
|
||||
});
|
||||
}
|
||||
|
||||
throw {
|
||||
status: 500,
|
||||
message: 'Password reset link has expired. Please request a new one.'
|
||||
message:
|
||||
"Password reset link has expired. Please request a new one.",
|
||||
};
|
||||
} else {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: hashedPassword }
|
||||
data: { password: hashedPassword },
|
||||
});
|
||||
return {
|
||||
userId: userFound.id,
|
||||
teamId: userFound.id,
|
||||
permission: userFound.permission,
|
||||
isAdmin: true
|
||||
isAdmin: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const passwordMatch = await bcrypt.compare(password, userFound.password);
|
||||
const passwordMatch = await bcrypt.compare(
|
||||
password,
|
||||
userFound.password
|
||||
);
|
||||
if (!passwordMatch) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: 'Wrong password or email address.'
|
||||
message: "Wrong password or email address.",
|
||||
};
|
||||
}
|
||||
uid = userFound.id;
|
||||
isAdmin = true;
|
||||
}
|
||||
} else {
|
||||
permission = 'owner';
|
||||
permission = "owner";
|
||||
isAdmin = true;
|
||||
if (!isRegistrationEnabled) {
|
||||
throw {
|
||||
status: 404,
|
||||
message: 'Registration disabled by administrator.'
|
||||
message: "Registration disabled by administrator.",
|
||||
};
|
||||
}
|
||||
const hashedPassword = await hashPassword(password);
|
||||
@@ -223,17 +275,17 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: 'email',
|
||||
type: "email",
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: uniqueName(),
|
||||
destinationDocker: { connect: { network: 'coolify' } }
|
||||
}
|
||||
destinationDocker: { connect: { network: "coolify" } },
|
||||
},
|
||||
},
|
||||
permission: { create: { teamId: uid, permission: 'owner' } }
|
||||
permission: { create: { teamId: uid, permission: "owner" } },
|
||||
},
|
||||
include: { teams: true }
|
||||
include: { teams: true },
|
||||
});
|
||||
} else {
|
||||
await prisma.user.create({
|
||||
@@ -241,16 +293,16 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: 'email',
|
||||
type: "email",
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: uniqueName()
|
||||
}
|
||||
name: uniqueName(),
|
||||
},
|
||||
},
|
||||
permission: { create: { teamId: uid, permission: 'owner' } }
|
||||
permission: { create: { teamId: uid, permission: "owner" } },
|
||||
},
|
||||
include: { teams: true }
|
||||
include: { teams: true },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -258,18 +310,21 @@ export async function login(request: FastifyRequest<Login>, reply: FastifyReply)
|
||||
userId: uid,
|
||||
teamId: uid,
|
||||
permission,
|
||||
isAdmin
|
||||
isAdmin,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fastify) {
|
||||
let token = null
|
||||
const { teamId } = request.query
|
||||
export async function getCurrentUser(
|
||||
request: FastifyRequest<GetCurrentUser>,
|
||||
fastify
|
||||
) {
|
||||
let token = null;
|
||||
const { teamId } = request.query;
|
||||
try {
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: request.user.userId }
|
||||
})
|
||||
where: { id: request.user.userId },
|
||||
});
|
||||
if (!user) {
|
||||
throw "User not found";
|
||||
}
|
||||
@@ -280,20 +335,20 @@ export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fa
|
||||
try {
|
||||
const user = await prisma.user.findFirst({
|
||||
where: { id: request.user.userId, teams: { some: { id: teamId } } },
|
||||
include: { teams: true, permission: true }
|
||||
})
|
||||
include: { teams: true, permission: true },
|
||||
});
|
||||
if (user) {
|
||||
const permission = user.permission.find(p => p.teamId === teamId).permission
|
||||
const permission = user.permission.find(
|
||||
(p) => p.teamId === teamId
|
||||
).permission;
|
||||
const payload = {
|
||||
...request.user,
|
||||
teamId,
|
||||
permission: permission || null,
|
||||
isAdmin: permission === 'owner' || permission === 'admin'
|
||||
|
||||
}
|
||||
token = fastify.jwt.sign(payload)
|
||||
isAdmin: permission === "owner" || permission === "admin",
|
||||
};
|
||||
token = fastify.jwt.sign(payload);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
// No new token -> not switching teams
|
||||
}
|
||||
@@ -302,6 +357,6 @@ export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fa
|
||||
settings: await prisma.setting.findFirst(),
|
||||
supportedServiceTypesAndVersions,
|
||||
token,
|
||||
...request.user
|
||||
}
|
||||
...request.user,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { FastifyPluginAsync } from 'fastify';
|
||||
import { checkUpdate, login, showDashboard, update, showUsage, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
|
||||
import { checkUpdate, login, showDashboard, update, resetQueue, getCurrentUser, cleanupManually, restartCoolify } from './handlers';
|
||||
import { GetCurrentUser } from './types';
|
||||
import pump from 'pump'
|
||||
import fs from 'fs'
|
||||
import { asyncExecShell, encrypt, errorHandler, prisma } from '../../../lib/common';
|
||||
|
||||
export interface Update {
|
||||
Body: { latestVersion: string }
|
||||
@@ -23,9 +26,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async (request) => await getCurrentUser(request, fastify));
|
||||
|
||||
fastify.get('/undead', {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async function () {
|
||||
fastify.get('/undead', async function () {
|
||||
return { message: 'nope' };
|
||||
});
|
||||
|
||||
@@ -43,17 +44,17 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async (request) => await showDashboard(request));
|
||||
|
||||
fastify.get('/usage', {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async () => await showUsage());
|
||||
|
||||
fastify.post('/internal/restart', {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async (request) => await restartCoolify(request));
|
||||
|
||||
fastify.post('/internal/resetQueue', {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async (request) => await resetQueue(request));
|
||||
|
||||
fastify.post('/internal/cleanup', {
|
||||
onRequest: [fastify.authenticate]
|
||||
}, async () => await cleanupManually());
|
||||
}, async (request) => await cleanupManually(request));
|
||||
};
|
||||
|
||||
export default root;
|
||||
|
||||
116
apps/api/src/routes/api/v1/servers/handlers.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { errorHandler, executeDockerCmd, prisma, createRemoteEngineConfiguration, executeSSHCmd } from '../../../../lib/common';
|
||||
import os from 'node:os';
|
||||
import osu from 'node-os-utils';
|
||||
|
||||
|
||||
export async function listServers(request: FastifyRequest) {
|
||||
try {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
const servers = await prisma.destinationDocker.findMany({ where: { teams: { some: { id: teamId === '0' ? undefined : teamId } }}, distinct: ['remoteIpAddress', 'engine'] })
|
||||
return {
|
||||
servers
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
const mappingTable = [
|
||||
['K total memory', 'totalMemoryKB'],
|
||||
['K used memory', 'usedMemoryKB'],
|
||||
['K active memory', 'activeMemoryKB'],
|
||||
['K inactive memory', 'inactiveMemoryKB'],
|
||||
['K free memory', 'freeMemoryKB'],
|
||||
['K buffer memory', 'bufferMemoryKB'],
|
||||
['K swap cache', 'swapCacheKB'],
|
||||
['K total swap', 'totalSwapKB'],
|
||||
['K used swap', 'usedSwapKB'],
|
||||
['K free swap', 'freeSwapKB'],
|
||||
['non-nice user cpu ticks', 'nonNiceUserCpuTicks'],
|
||||
['nice user cpu ticks', 'niceUserCpuTicks'],
|
||||
['system cpu ticks', 'systemCpuTicks'],
|
||||
['idle cpu ticks', 'idleCpuTicks'],
|
||||
['IO-wait cpu ticks', 'ioWaitCpuTicks'],
|
||||
['IRQ cpu ticks', 'irqCpuTicks'],
|
||||
['softirq cpu ticks', 'softIrqCpuTicks'],
|
||||
['stolen cpu ticks', 'stolenCpuTicks'],
|
||||
['pages paged in', 'pagesPagedIn'],
|
||||
['pages paged out', 'pagesPagedOut'],
|
||||
['pages swapped in', 'pagesSwappedIn'],
|
||||
['pages swapped out', 'pagesSwappedOut'],
|
||||
['interrupts', 'interrupts'],
|
||||
['CPU context switches', 'cpuContextSwitches'],
|
||||
['boot time', 'bootTime'],
|
||||
['forks', 'forks']
|
||||
];
|
||||
function parseFromText(text) {
|
||||
var data = {};
|
||||
var lines = text.split(/\r?\n/);
|
||||
for (const line of lines) {
|
||||
for (const [key, value] of mappingTable) {
|
||||
if (line.indexOf(key) >= 0) {
|
||||
const values = line.match(/[0-9]+/)[0];
|
||||
data[value] = parseInt(values, 10);
|
||||
}
|
||||
}
|
||||
}
|
||||
return data;
|
||||
}
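parseFromText scans vmstat -s output line by line, matches each line against the mapping table, and keeps the first number it finds on a matching line. A worked example with sample lines (the figures are made up):

    // Sample input resembling `vmstat -s` output; numbers are illustrative only.
    const sample = [
        '      8148732 K total memory',
        '      2631204 K used memory',
        '       123456 interrupts'
    ].join('\n');

    parseFromText(sample);
    // => { totalMemoryKB: 8148732, usedMemoryKB: 2631204, interrupts: 123456 }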
|
||||
export async function showUsage(request: FastifyRequest) {
|
||||
const { id } = request.params;
|
||||
let { remoteEngine } = request.query
|
||||
remoteEngine = remoteEngine === 'true' ? true : false
|
||||
if (remoteEngine) {
|
||||
const { stdout: stats } = await executeSSHCmd({ dockerId: id, command: `vmstat -s` })
|
||||
const { stdout: disks } = await executeSSHCmd({ dockerId: id, command: `df -m / --output=size,used,pcent|grep -v 'Used'| xargs` })
|
||||
const { stdout: cpus } = await executeSSHCmd({ dockerId: id, command: `nproc --all` })
|
||||
const { stdout: cpuUsage } = await executeSSHCmd({ dockerId: id, command: `echo $[100-$(vmstat 1 2|tail -1|awk '{print $15}')]` })
|
||||
const parsed: any = parseFromText(stats)
|
||||
return {
|
||||
usage: {
|
||||
uptime: parsed.bootTime / 1024,
|
||||
memory: {
|
||||
totalMemMb: parsed.totalMemoryKB / 1024,
|
||||
usedMemMb: parsed.usedMemoryKB / 1024,
|
||||
freeMemMb: parsed.freeMemoryKB / 1024,
|
||||
usedMemPercentage: (parsed.usedMemoryKB / parsed.totalMemoryKB) * 100,
|
||||
freeMemPercentage: (parsed.totalMemoryKB - parsed.usedMemoryKB) / parsed.totalMemoryKB * 100
|
||||
},
|
||||
cpu: {
|
||||
load: [0,0,0],
|
||||
usage: cpuUsage,
|
||||
count: cpus
|
||||
},
|
||||
disk: {
|
||||
totalGb: (disks.split(' ')[0] / 1024).toFixed(1),
|
||||
usedGb: (disks.split(' ')[1] / 1024).toFixed(1),
|
||||
freeGb: (disks.split(' ')[0] - disks.split(' ')[1]).toFixed(1),
|
||||
usedPercentage: disks.split(' ')[2].replace('%', ''),
|
||||
freePercentage: 100 - disks.split(' ')[2].replace('%', '')
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return {
|
||||
usage: {
|
||||
uptime: os.uptime(),
|
||||
memory: await osu.mem.info(),
|
||||
cpu: {
|
||||
load: os.loadavg(),
|
||||
usage: await osu.cpu.usage(),
|
||||
count: os.cpus().length
|
||||
},
|
||||
disk: await osu.drive.info('/')
|
||||
}
|
||||
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
14
apps/api/src/routes/api/v1/servers/index.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { FastifyPluginAsync } from 'fastify';
|
||||
import { listServers, showUsage } from './handlers';
|
||||
|
||||
|
||||
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.addHook('onRequest', async (request) => {
|
||||
return await request.jwtVerify()
|
||||
})
|
||||
fastify.get('/', async (request) => await listServers(request));
|
||||
fastify.get('/usage/:id', async (request) => await showUsage(request));
|
||||
|
||||
};
|
||||
|
||||
export default root;
|
||||
27
apps/api/src/routes/api/v1/servers/types.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { OnlyId } from "../../../../types"
|
||||
|
||||
export interface SaveTeam extends OnlyId {
|
||||
Body: {
|
||||
name: string
|
||||
}
|
||||
}
|
||||
export interface InviteToTeam {
|
||||
Body: {
|
||||
email: string,
|
||||
permission: string,
|
||||
teamId: string,
|
||||
teamName: string
|
||||
}
|
||||
}
|
||||
export interface BodyId {
|
||||
Body: {
|
||||
id: string
|
||||
}
|
||||
}
|
||||
export interface SetPermission {
|
||||
Body: {
|
||||
userId: string,
|
||||
newPermission: string,
|
||||
permissionId: string
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,13 @@
|
||||
import type { FastifyReply, FastifyRequest } from 'fastify';
|
||||
import fs from 'fs/promises';
|
||||
import yaml from 'js-yaml';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import { prisma, uniqueName, asyncExecShell, getServiceImage, getServiceFromDB, getContainerUsage,isDomainConfigured, saveUpdateableFields, fixType, decrypt, encrypt, getServiceMainPort, createDirectories, ComposeFile, makeLabelForServices, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, persistentVolumes, asyncSleep, isARM, defaultComposeConfiguration, checkExposedPort } from '../../../../lib/common';
|
||||
import { prisma, uniqueName, asyncExecShell, getServiceFromDB, getContainerUsage, isDomainConfigured, saveUpdateableFields, fixType, decrypt, encrypt, ComposeFile, getFreePublicPort, getDomain, errorHandler, generatePassword, isDev, stopTcpHttpProxy, executeDockerCmd, checkDomainsIsValidInDNS, checkExposedPort, listSettings } from '../../../../lib/common';
|
||||
import { day } from '../../../../lib/dayjs';
|
||||
import { checkContainer, isContainerExited, removeContainer } from '../../../../lib/docker';
|
||||
import { checkContainer, isContainerExited } from '../../../../lib/docker';
|
||||
import cuid from 'cuid';
|
||||
|
||||
import type { OnlyId } from '../../../../types';
|
||||
import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetWordpressSettings } from './types';
|
||||
import { defaultServiceConfigurations } from '../../../../lib/services';
|
||||
import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetGlitchTipSettings, SetWordpressSettings } from './types';
|
||||
import { supportedServiceTypesAndVersions } from '../../../../lib/services/supportedVersions';
|
||||
import { configureServiceType, removeService } from '../../../../lib/services/common';
|
||||
|
||||
@@ -38,6 +36,33 @@ export async function newService(request: FastifyRequest, reply: FastifyReply) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function cleanupUnconfiguredServices(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
let services = await prisma.service.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, teams: true },
|
||||
});
|
||||
for (const service of services) {
|
||||
if (!service.fqdn) {
|
||||
if (service.destinationDockerId) {
|
||||
await executeDockerCmd({
|
||||
dockerId: service.destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker stop -t 0`
|
||||
})
|
||||
await executeDockerCmd({
|
||||
dockerId: service.destinationDockerId,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}|xargs -r -n 1 docker rm --force`
|
||||
})
|
||||
}
|
||||
await removeService({ id: service.id });
|
||||
}
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
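cleanupUnconfiguredServices stops and removes every container that docker compose labelled with the service id, then deletes the service records. The two docker invocations could be wrapped in a small helper; the sketch below assumes executeDockerCmd is the lib/common helper already imported in this file, and the wrapper name itself is hypothetical.

    // Hypothetical wrapper around the two commands used above; `xargs -r` keeps both
    // commands as no-ops when no container carries the compose-project label.
    async function stopAndRemoveComposeProject(dockerId: string, project: string): Promise<void> {
        const filter = `--filter 'label=com.docker.compose.project=${project}' --format {{.ID}}`;
        await executeDockerCmd({ dockerId, command: `docker ps -a ${filter} | xargs -r -n 1 docker stop -t 0` });
        await executeDockerCmd({ dockerId, command: `docker ps -a ${filter} | xargs -r -n 1 docker rm --force` });
    }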
|
||||
export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
@@ -45,13 +70,17 @@ export async function getServiceStatus(request: FastifyRequest<OnlyId>) {
|
||||
|
||||
let isRunning = false;
|
||||
let isExited = false
|
||||
|
||||
let isRestarting = false;
|
||||
const service = await getServiceFromDB({ id, teamId });
|
||||
const { destinationDockerId, settings } = service;
|
||||
|
||||
if (destinationDockerId) {
|
||||
isRunning = await checkContainer({ dockerId: service.destinationDocker.id, container: id });
|
||||
isExited = await isContainerExited(service.destinationDocker.id, id);
|
||||
const status = await checkContainer({ dockerId: service.destinationDocker.id, container: id });
|
||||
if (status?.found) {
|
||||
isRunning = status.status.isRunning;
|
||||
isExited = status.status.isExited;
|
||||
isRestarting = status.status.isRestarting
|
||||
}
|
||||
}
|
||||
return {
|
||||
isRunning,
|
||||
@@ -72,6 +101,7 @@ export async function getService(request: FastifyRequest<OnlyId>) {
|
||||
throw { status: 404, message: 'Service not found.' }
|
||||
}
|
||||
return {
|
||||
settings: await listSettings(),
|
||||
service
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
@@ -234,7 +264,7 @@ export async function checkService(request: FastifyRequest<CheckService>) {
|
||||
if (otherFqdns && otherFqdns.length > 0) otherFqdns = otherFqdns.map((f) => f.toLowerCase());
|
||||
if (exposePort) exposePort = Number(exposePort);
|
||||
|
||||
const { destinationDocker: { id: dockerId, remoteIpAddress, remoteEngine }, exposePort: configuredPort } = await prisma.service.findUnique({ where: { id }, include: { destinationDocker: true } })
|
||||
const { destinationDocker: { remoteIpAddress, remoteEngine, engine }, exposePort: configuredPort } = await prisma.service.findUnique({ where: { id }, include: { destinationDocker: true } })
|
||||
const { isDNSCheckEnabled } = await prisma.setting.findFirst({});
|
||||
|
||||
let found = await isDomainConfigured({ id, fqdn, remoteIpAddress });
|
||||
@@ -249,7 +279,7 @@ export async function checkService(request: FastifyRequest<CheckService>) {
|
||||
}
|
||||
}
|
||||
}
|
||||
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, dockerId, remoteIpAddress })
|
||||
if (exposePort) await checkExposedPort({ id, configuredPort, exposePort, engine, remoteEngine, remoteIpAddress })
|
||||
if (isDNSCheckEnabled && !isDev && !forceSave) {
|
||||
let hostname = request.hostname.split(':')[0];
|
||||
if (remoteEngine) hostname = remoteIpAddress;
|
||||
@@ -269,7 +299,6 @@ export async function saveService(request: FastifyRequest<SaveService>, reply: F
|
||||
if (exposePort) exposePort = Number(exposePort);
|
||||
|
||||
type = fixType(type)
|
||||
|
||||
const update = saveUpdateableFields(type, request.body[type])
|
||||
const data = {
|
||||
fqdn,
|
||||
@@ -400,17 +429,33 @@ export async function deleteServiceStorage(request: FastifyRequest<DeleteService
|
||||
}
|
||||
}
|
||||
|
||||
export async function setSettingsService(request: FastifyRequest<ServiceStartStop & SetWordpressSettings>, reply: FastifyReply) {
|
||||
export async function setSettingsService(request: FastifyRequest<ServiceStartStop & SetWordpressSettings & SetGlitchTipSettings>, reply: FastifyReply) {
|
||||
try {
|
||||
const { type } = request.params
|
||||
if (type === 'wordpress') {
|
||||
return await setWordpressSettings(request, reply)
|
||||
}
|
||||
if (type === 'glitchtip') {
|
||||
return await setGlitchTipSettings(request, reply)
|
||||
}
|
||||
throw `Service type ${type} not supported.`
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
async function setGlitchTipSettings(request: FastifyRequest<SetGlitchTipSettings>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { enableOpenUserRegistration, emailSmtpUseSsl, emailSmtpUseTls } = request.body
|
||||
await prisma.glitchTip.update({
|
||||
where: { serviceId: id },
|
||||
data: { enableOpenUserRegistration, emailSmtpUseSsl, emailSmtpUseTls }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
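setSettingsService dispatches on the :type path segment, so the new GlitchTip branch is reached through POST /:id/glitchtip/settings with the three booleans from SetGlitchTipSettings. A hedged client example; the API prefix, apiBase, id and token are placeholders, not values from this diff.

    // Illustrative request against the route registered below; prefix and auth are assumed.
    await fetch(`${apiBase}/api/v1/services/${id}/glitchtip/settings`, {
        method: 'POST',
        headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' },
        body: JSON.stringify({
            enableOpenUserRegistration: true,
            emailSmtpUseSsl: false,
            emailSmtpUseTls: true
        })
    });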
|
||||
async function setWordpressSettings(request: FastifyRequest<ServiceStartStop & SetWordpressSettings>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
@@ -438,7 +483,7 @@ export async function activatePlausibleUsers(request: FastifyRequest<OnlyId>, re
|
||||
if (destinationDockerId) {
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker exec ${id} 'psql -H postgresql://${postgresqlUser}:${postgresqlPassword}@localhost:5432/${postgresqlDatabase} -c "UPDATE users SET email_verified = true;"'`
|
||||
command: `docker exec ${id}-postgresql psql -H postgresql://${postgresqlUser}:${postgresqlPassword}@localhost:5432/${postgresqlDatabase} -c "UPDATE users SET email_verified = true;"`
|
||||
})
|
||||
return await reply.code(201).send()
|
||||
}
|
||||
@@ -458,7 +503,7 @@ export async function cleanupPlausibleLogs(request: FastifyRequest<OnlyId>, repl
|
||||
if (destinationDockerId) {
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker exec ${id}-clickhouse sh -c "/usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\""`
|
||||
command: `docker exec ${id}-clickhouse /usr/bin/clickhouse-client -q \\"SELECT name FROM system.tables WHERE name LIKE '%log%';\\"| xargs -I{} /usr/bin/clickhouse-client -q \"TRUNCATE TABLE system.{};\"`
|
||||
})
|
||||
return await reply.code(201).send()
|
||||
}
|
||||
@@ -471,9 +516,9 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
|
||||
const { id } = request.params
|
||||
const { ftpEnabled } = request.body;
|
||||
|
||||
const { service: { destinationDocker: { id: dockerId } } } = await prisma.wordpress.findUnique({ where: { serviceId: id }, include: { service: { include: { destinationDocker: true } } } })
|
||||
const { service: { destinationDocker: { engine, remoteEngine, remoteIpAddress } } } = await prisma.wordpress.findUnique({ where: { serviceId: id }, include: { service: { include: { destinationDocker: true } } } })
|
||||
|
||||
const publicPort = await getFreePublicPort(id, dockerId);
|
||||
const publicPort = await getFreePublicPort({ id, remoteEngine, engine, remoteIpAddress });
|
||||
|
||||
let ftpUser = cuid();
|
||||
let ftpPassword = generatePassword({});
|
||||
@@ -540,17 +585,14 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
|
||||
});
|
||||
|
||||
try {
|
||||
const isRunning = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
|
||||
const { found: isRunning } = await checkContainer({ dockerId: destinationDocker.id, container: `${id}-ftp` });
|
||||
if (isRunning) {
|
||||
await executeDockerCmd({
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker stop -t 0 ${id}-ftp && docker rm ${id}-ftp`
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
//
|
||||
}
|
||||
} catch (error) { }
|
||||
const volumes = [
|
||||
`${id}-wordpress-data:/home/${ftpUser}/wordpress`,
|
||||
`${isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
|
||||
@@ -629,9 +671,7 @@ export async function activateWordpressFtp(request: FastifyRequest<ActivateWordp
|
||||
await asyncExecShell(
|
||||
`rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
|
||||
);
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
} catch (error) { }
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import {
|
||||
checkService,
|
||||
checkServiceDomain,
|
||||
cleanupPlausibleLogs,
|
||||
cleanupUnconfiguredServices,
|
||||
deleteService,
|
||||
deleteServiceSecret,
|
||||
deleteServiceStorage,
|
||||
@@ -29,8 +30,8 @@ import {
|
||||
} from './handlers';
|
||||
|
||||
import type { OnlyId } from '../../../../types';
|
||||
import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetWordpressSettings } from './types';
|
||||
import { startService, stopService } from '../../../../lib/services/handlers';
|
||||
import type { ActivateWordpressFtp, CheckService, CheckServiceDomain, DeleteServiceSecret, DeleteServiceStorage, GetServiceLogs, SaveService, SaveServiceDestination, SaveServiceSecret, SaveServiceSettings, SaveServiceStorage, SaveServiceType, SaveServiceVersion, ServiceStartStop, SetGlitchTipSettings, SetWordpressSettings } from './types';
|
||||
import { migrateAppwriteDB, startService, stopService } from '../../../../lib/services/handlers';
|
||||
|
||||
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.addHook('onRequest', async (request) => {
|
||||
@@ -39,6 +40,8 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.get('/', async (request) => await listServices(request));
|
||||
fastify.post('/new', async (request, reply) => await newService(request, reply));
|
||||
|
||||
fastify.post<any>('/cleanup/unconfigured', async (request) => await cleanupUnconfiguredServices(request));
|
||||
|
||||
fastify.get<OnlyId>('/:id', async (request) => await getService(request));
|
||||
fastify.post<SaveService>('/:id', async (request, reply) => await saveService(request, reply));
|
||||
fastify.delete<OnlyId>('/:id', async (request) => await deleteService(request));
|
||||
@@ -71,11 +74,13 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
|
||||
fastify.post<ServiceStartStop>('/:id/:type/start', async (request) => await startService(request));
|
||||
fastify.post<ServiceStartStop>('/:id/:type/stop', async (request) => await stopService(request));
|
||||
fastify.post<ServiceStartStop & SetWordpressSettings>('/:id/:type/settings', async (request, reply) => await setSettingsService(request, reply));
|
||||
fastify.post<ServiceStartStop & SetWordpressSettings & SetGlitchTipSettings>('/:id/:type/settings', async (request, reply) => await setSettingsService(request, reply));
|
||||
|
||||
fastify.post<OnlyId>('/:id/plausibleanalytics/activate', async (request, reply) => await activatePlausibleUsers(request, reply));
|
||||
fastify.post<OnlyId>('/:id/plausibleanalytics/cleanup', async (request, reply) => await cleanupPlausibleLogs(request, reply));
|
||||
fastify.post<ActivateWordpressFtp>('/:id/wordpress/ftp', async (request, reply) => await activateWordpressFtp(request, reply));
|
||||
|
||||
fastify.post<OnlyId>('/:id/appwrite/migrate', async (request, reply) => await migrateAppwriteDB(request, reply));
|
||||
};
|
||||
|
||||
export default root;
|
||||
|
||||
@@ -89,6 +89,10 @@ export interface ActivateWordpressFtp extends OnlyId {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
export interface SetGlitchTipSettings extends OnlyId {
|
||||
Body: {
|
||||
enableOpenUserRegistration: boolean,
|
||||
emailSmtpUseSsl: boolean,
|
||||
emailSmtpUseTls: boolean
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { promises as dns } from 'dns';
|
||||
import { X509Certificate } from 'node:crypto';
|
||||
|
||||
import type { FastifyReply, FastifyRequest } from 'fastify';
|
||||
import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, getDomain, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common';
|
||||
import { CheckDNS, CheckDomain, DeleteDomain, DeleteSSHKey, SaveSettings, SaveSSHKey } from './types';
|
||||
import { asyncExecShell, checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, isDNSValid, isDomainConfigured, listSettings, prisma } from '../../../../lib/common';
|
||||
import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
|
||||
|
||||
|
||||
export async function listAllSettings(request: FastifyRequest) {
|
||||
@@ -16,8 +17,16 @@ export async function listAllSettings(request: FastifyRequest) {
|
||||
unencryptedKeys.push({ id: key.id, name: key.name, privateKey: decrypt(key.privateKey), createdAt: key.createdAt })
|
||||
}
|
||||
}
|
||||
const certificates = await prisma.certificate.findMany({ where: { team: { id: teamId } } })
|
||||
let cns = [];
|
||||
for (const certificate of certificates) {
|
||||
const x509 = new X509Certificate(certificate.cert);
|
||||
cns.push({ commonName: x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', ''), id: certificate.id, createdAt: certificate.createdAt })
|
||||
}
|
||||
|
||||
return {
|
||||
settings,
|
||||
certificates: cns,
|
||||
sshKeys: unencryptedKeys
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
@@ -28,6 +37,7 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
|
||||
try {
|
||||
const {
|
||||
fqdn,
|
||||
isAPIDebuggingEnabled,
|
||||
isRegistrationEnabled,
|
||||
dualCerts,
|
||||
minPort,
|
||||
@@ -39,7 +49,7 @@ export async function saveSettings(request: FastifyRequest<SaveSettings>, reply:
|
||||
const { id } = await listSettings();
|
||||
await prisma.setting.update({
|
||||
where: { id },
|
||||
data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers }
|
||||
data: { isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled }
|
||||
});
|
||||
if (fqdn) {
|
||||
await prisma.setting.update({ where: { id }, data: { fqdn } });
|
||||
@@ -57,7 +67,7 @@ export async function deleteDomain(request: FastifyRequest<DeleteDomain>, reply:
|
||||
const { fqdn } = request.body
|
||||
const { DNSServers } = await listSettings();
|
||||
if (DNSServers) {
|
||||
dns.setServers([DNSServers]);
|
||||
dns.setServers([...DNSServers.split(',')]);
|
||||
}
|
||||
let ip;
|
||||
try {
|
||||
@@ -117,7 +127,7 @@ export async function saveSSHKey(request: FastifyRequest<SaveSSHKey>, reply: Fas
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
export async function deleteSSHKey(request: FastifyRequest<DeleteSSHKey>, reply: FastifyReply) {
|
||||
export async function deleteSSHKey(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.body;
|
||||
await prisma.sshKey.delete({ where: { id } })
|
||||
@@ -125,4 +135,15 @@ export async function deleteSSHKey(request: FastifyRequest<DeleteSSHKey>, reply:
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteCertificates(request: FastifyRequest<OnlyIdInBody>, reply: FastifyReply) {
|
||||
try {
|
||||
const { id } = request.body;
|
||||
await asyncExecShell(`docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`)
|
||||
await prisma.certificate.delete({ where: { id } })
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
}
|
||||
@@ -1,21 +1,59 @@
|
||||
import { FastifyPluginAsync } from 'fastify';
|
||||
import { checkDNS, checkDomain, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers';
|
||||
import { CheckDNS, CheckDomain, DeleteDomain, DeleteSSHKey, SaveSettings, SaveSSHKey } from './types';
|
||||
import { X509Certificate } from 'node:crypto';
|
||||
|
||||
import { encrypt, errorHandler, prisma } from '../../../../lib/common';
|
||||
import { checkDNS, checkDomain, deleteCertificates, deleteDomain, deleteSSHKey, listAllSettings, saveSettings, saveSSHKey } from './handlers';
|
||||
import { CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey } from './types';
|
||||
|
||||
|
||||
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.addHook('onRequest', async (request) => {
|
||||
return await request.jwtVerify()
|
||||
})
|
||||
fastify.get('/', async (request) => await listAllSettings(request));
|
||||
fastify.post<SaveSettings>('/', async (request, reply) => await saveSettings(request, reply));
|
||||
fastify.delete<DeleteDomain>('/', async (request, reply) => await deleteDomain(request, reply));
|
||||
fastify.addHook('onRequest', async (request) => {
|
||||
return await request.jwtVerify()
|
||||
})
|
||||
fastify.get('/', async (request) => await listAllSettings(request));
|
||||
fastify.post<SaveSettings>('/', async (request, reply) => await saveSettings(request, reply));
|
||||
fastify.delete<DeleteDomain>('/', async (request, reply) => await deleteDomain(request, reply));
|
||||
|
||||
fastify.get<CheckDNS>('/check', async (request) => await checkDNS(request));
|
||||
fastify.post<CheckDomain>('/check', async (request) => await checkDomain(request));
|
||||
fastify.get<CheckDNS>('/check', async (request) => await checkDNS(request));
|
||||
fastify.post<CheckDomain>('/check', async (request) => await checkDomain(request));
|
||||
|
||||
fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply));
|
||||
fastify.delete<DeleteSSHKey>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply));
|
||||
fastify.post<SaveSSHKey>('/sshKey', async (request, reply) => await saveSSHKey(request, reply));
|
||||
fastify.delete<OnlyIdInBody>('/sshKey', async (request, reply) => await deleteSSHKey(request, reply));
|
||||
|
||||
fastify.post('/upload', async (request) => {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const certificates = await prisma.certificate.findMany({})
|
||||
let cns = [];
|
||||
for (const certificate of certificates) {
|
||||
const x509 = new X509Certificate(certificate.cert);
|
||||
cns.push(x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', ''))
|
||||
}
|
||||
const parts = await request.files()
|
||||
let key = null
|
||||
let cert = null
|
||||
for await (const part of parts) {
|
||||
const name = part.fieldname
|
||||
if (name === 'key') key = (await part.toBuffer()).toString()
|
||||
if (name === 'cert') cert = (await part.toBuffer()).toString()
|
||||
}
|
||||
const x509 = new X509Certificate(cert);
|
||||
const cn = x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', '')
|
||||
if (cns.includes(cn)) {
|
||||
throw {
|
||||
message: `A certificate with ${cn} common name already exists.`
|
||||
}
|
||||
}
|
||||
await prisma.certificate.create({ data: { cert, key: encrypt(key), team: { connect: { id: teamId } } } })
|
||||
await prisma.applicationSettings.updateMany({ where: { application: { AND: [{ fqdn: { endsWith: cn } }, { fqdn: { startsWith: 'https' } }] } }, data: { isCustomSSL: true } })
|
||||
return { message: 'Certificated uploaded' }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
|
||||
});
|
||||
fastify.delete<OnlyIdInBody>('/certificate', async (request, reply) => await deleteCertificates(request, reply))
|
||||
// fastify.get('/certificates', async (request) => await getCertificates(request))
|
||||
};
|
||||
|
||||
export default root;
|
||||
|
||||
@@ -3,6 +3,7 @@ import { OnlyId } from "../../../../types"
|
||||
export interface SaveSettings {
|
||||
Body: {
|
||||
fqdn: string,
|
||||
isAPIDebuggingEnabled: boolean,
|
||||
isRegistrationEnabled: boolean,
|
||||
dualCerts: boolean,
|
||||
minPort: number,
|
||||
@@ -40,4 +41,9 @@ export interface DeleteSSHKey {
|
||||
Body: {
|
||||
id: string
|
||||
}
|
||||
}
|
||||
export interface OnlyIdInBody {
|
||||
Body: {
|
||||
id: string
|
||||
}
|
||||
}
|
||||
@@ -9,7 +9,7 @@ export async function listSources(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user?.teamId;
|
||||
const sources = await prisma.gitSource.findMany({
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
|
||||
include: { teams: true, githubApp: true, gitlabApp: true }
|
||||
});
|
||||
return {
|
||||
@@ -22,11 +22,11 @@ export async function listSources(request: FastifyRequest) {
|
||||
export async function saveSource(request, reply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { name, htmlUrl, apiUrl, customPort } = request.body
|
||||
let { name, htmlUrl, apiUrl, customPort, isSystemWide } = request.body
|
||||
if (customPort) customPort = Number(customPort)
|
||||
await prisma.gitSource.update({
|
||||
where: { id },
|
||||
data: { name, htmlUrl, apiUrl, customPort }
|
||||
data: { name, htmlUrl, apiUrl, customPort, isSystemWide }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
@@ -56,7 +56,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
|
||||
}
|
||||
|
||||
const source = await prisma.gitSource.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
where: { id, OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (!source) {
|
||||
@@ -104,7 +104,7 @@ export async function saveGitHubSource(request: FastifyRequest<SaveGitHubSource>
|
||||
const { teamId } = request.user
|
||||
|
||||
const { id } = request.params
|
||||
let { name, htmlUrl, apiUrl, organization, customPort } = request.body
|
||||
let { name, htmlUrl, apiUrl, organization, customPort, isSystemWide } = request.body
|
||||
|
||||
if (customPort) customPort = Number(customPort)
|
||||
if (id === 'new') {
|
||||
@@ -117,6 +117,7 @@ export async function saveGitHubSource(request: FastifyRequest<SaveGitHubSource>
|
||||
apiUrl,
|
||||
organization,
|
||||
customPort,
|
||||
isSystemWide,
|
||||
type: 'github',
|
||||
teams: { connect: { id: teamId } }
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ export interface SaveGitHubSource extends OnlyId {
|
||||
apiUrl: string,
|
||||
organization: string,
|
||||
customPort: number,
|
||||
isSystemWide: boolean
|
||||
}
|
||||
}
|
||||
export interface SaveGitLabSource extends OnlyId {
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import axios from "axios";
|
||||
import cuid from "cuid";
|
||||
import crypto from "crypto";
|
||||
import { encrypt, errorHandler, getUIUrl, isDev, prisma } from "../../../lib/common";
|
||||
import { encrypt, errorHandler, getDomain, getUIUrl, isDev, prisma } from "../../../lib/common";
|
||||
import { checkContainer, removeContainer } from "../../../lib/docker";
|
||||
import { scheduler } from "../../../lib/scheduler";
|
||||
import { getApplicationFromDBWebhook } from "../../api/v1/applications/handlers";
|
||||
import { createdBranchDatabase, getApplicationFromDBWebhook, removeBranchDatabase } from "../../api/v1/applications/handlers";
|
||||
|
||||
import type { FastifyReply, FastifyRequest } from "fastify";
|
||||
import type { GitHubEvents, InstallGithub } from "./types";
|
||||
@@ -67,7 +66,6 @@ export async function configureGitHubApp(request, reply) {
|
||||
}
|
||||
export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promise<any> {
|
||||
try {
|
||||
const buildId = cuid();
|
||||
const allowedGithubEvents = ['push', 'pull_request'];
|
||||
const allowedActions = ['opened', 'reopened', 'synchronize', 'closed'];
|
||||
const githubEvent = request.headers['x-github-event']?.toString().toLowerCase();
|
||||
@@ -87,126 +85,170 @@ export async function gitHubEvents(request: FastifyRequest<GitHubEvents>): Promi
|
||||
if (!projectId || !branch) {
|
||||
throw { status: 500, message: 'Cannot parse projectId or branch from the webhook?!' }
|
||||
}
|
||||
const applicationFound = await getApplicationFromDBWebhook(projectId, branch);
|
||||
if (applicationFound) {
|
||||
const webhookSecret = applicationFound.gitSource.githubApp.webhookSecret || null;
|
||||
//@ts-ignore
|
||||
const hmac = crypto.createHmac('sha256', webhookSecret);
|
||||
const digest = Buffer.from(
|
||||
'sha256=' + hmac.update(JSON.stringify(body)).digest('hex'),
|
||||
'utf8'
|
||||
);
|
||||
if (!isDev) {
|
||||
const checksum = Buffer.from(githubSignature, 'utf8');
|
||||
const applicationsFound = await getApplicationFromDBWebhook(projectId, branch);
|
||||
if (applicationsFound && applicationsFound.length > 0) {
|
||||
for (const application of applicationsFound) {
|
||||
const buildId = cuid();
|
||||
const webhookSecret = application.gitSource.githubApp.webhookSecret || null;
|
||||
//@ts-ignore
|
||||
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
||||
throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?' }
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (githubEvent === 'push') {
|
||||
if (!applicationFound.configHash) {
|
||||
const configHash = crypto
|
||||
//@ts-ignore
|
||||
.createHash('sha256')
|
||||
.update(
|
||||
JSON.stringify({
|
||||
buildPack: applicationFound.buildPack,
|
||||
port: applicationFound.port,
|
||||
exposePort: applicationFound.exposePort,
|
||||
installCommand: applicationFound.installCommand,
|
||||
buildCommand: applicationFound.buildCommand,
|
||||
startCommand: applicationFound.startCommand
|
||||
})
|
||||
)
|
||||
.digest('hex');
|
||||
await prisma.application.updateMany({
|
||||
where: { branch, projectId },
|
||||
data: { configHash }
|
||||
});
|
||||
}
|
||||
await prisma.application.update({
|
||||
where: { id: applicationFound.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: applicationFound.id,
|
||||
destinationDockerId: applicationFound.destinationDocker.id,
|
||||
gitSourceId: applicationFound.gitSource.id,
|
||||
githubAppId: applicationFound.gitSource.githubApp?.id,
|
||||
gitlabAppId: applicationFound.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_commit'
|
||||
}
|
||||
});
|
||||
return {
|
||||
message: 'Queued. Thank you!'
|
||||
};
|
||||
} else if (githubEvent === 'pull_request') {
|
||||
const pullmergeRequestId = body.number.toString();
|
||||
const pullmergeRequestAction = body.action;
|
||||
const sourceBranch = body.pull_request.head.ref.includes('/') ? body.pull_request.head.ref.split('/')[2] : body.pull_request.head.ref;
|
||||
if (!allowedActions.includes(pullmergeRequestAction)) {
|
||||
throw { status: 500, message: 'Action not allowed.' }
|
||||
const hmac = crypto.createHmac('sha256', webhookSecret);
|
||||
const digest = Buffer.from(
|
||||
'sha256=' + hmac.update(JSON.stringify(body)).digest('hex'),
|
||||
'utf8'
|
||||
);
|
||||
if (!isDev) {
|
||||
const checksum = Buffer.from(githubSignature, 'utf8');
|
||||
//@ts-ignore
|
||||
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
|
||||
throw { status: 500, message: 'SHA256 checksum failed. Are you doing something fishy?' }
|
||||
};
|
||||
}
|
||||
|
||||
if (applicationFound.settings.previews) {
|
||||
if (applicationFound.destinationDockerId) {
|
||||
const isRunning = await checkContainer(
|
||||
{
|
||||
dockerId: applicationFound.destinationDocker.id,
|
||||
container: applicationFound.id
|
||||
}
|
||||
);
|
||||
if (!isRunning) {
|
||||
throw { status: 500, message: 'Application not running.' }
|
||||
}
|
||||
}
|
||||
if (
|
||||
pullmergeRequestAction === 'opened' ||
|
||||
pullmergeRequestAction === 'reopened' ||
|
||||
pullmergeRequestAction === 'synchronize'
|
||||
) {
|
||||
if (githubEvent === 'push') {
|
||||
if (!application.configHash) {
|
||||
const configHash = crypto
|
||||
//@ts-ignore
|
||||
.createHash('sha256')
|
||||
.update(
|
||||
JSON.stringify({
|
||||
buildPack: application.buildPack,
|
||||
port: application.port,
|
||||
exposePort: application.exposePort,
|
||||
installCommand: application.installCommand,
|
||||
buildCommand: application.buildCommand,
|
||||
startCommand: application.startCommand
|
||||
})
|
||||
)
|
||||
.digest('hex');
|
||||
await prisma.application.update({
|
||||
where: { id: applicationFound.id },
|
||||
data: { updatedAt: new Date() }
|
||||
where: { id: application.id },
|
||||
data: { configHash }
|
||||
});
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
pullmergeRequestId,
|
||||
sourceBranch,
|
||||
applicationId: applicationFound.id,
|
||||
destinationDockerId: applicationFound.destinationDocker.id,
|
||||
gitSourceId: applicationFound.gitSource.id,
|
||||
githubAppId: applicationFound.gitSource.githubApp?.id,
|
||||
gitlabAppId: applicationFound.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_pr'
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
message: 'Queued. Thank you!'
|
||||
};
|
||||
} else if (pullmergeRequestAction === 'closed') {
|
||||
if (applicationFound.destinationDockerId) {
|
||||
const id = `${applicationFound.id}-${pullmergeRequestId}`;
|
||||
await removeContainer({ id, dockerId: applicationFound.destinationDocker.id });
|
||||
}
|
||||
return {
|
||||
message: 'Removed preview. Thank you!'
|
||||
};
|
||||
}
|
||||
} else {
|
||||
throw { status: 500, message: 'Pull request previews are not enabled.' }
|
||||
|
||||
await prisma.application.update({
|
||||
where: { id: application.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: application.id,
|
||||
destinationDockerId: application.destinationDocker.id,
|
||||
gitSourceId: application.gitSource.id,
|
||||
githubAppId: application.gitSource.githubApp?.id,
|
||||
gitlabAppId: application.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_commit'
|
||||
}
|
||||
});
|
||||
console.log(`Webhook for ${application.name} queued.`)
|
||||
|
||||
} else if (githubEvent === 'pull_request') {
|
||||
const pullmergeRequestId = body.number.toString();
|
||||
const pullmergeRequestAction = body.action;
|
||||
const sourceBranch = body.pull_request.head.ref.includes('/') ? body.pull_request.head.ref.split('/')[2] : body.pull_request.head.ref;
|
||||
if (!allowedActions.includes(pullmergeRequestAction)) {
|
||||
throw { status: 500, message: 'Action not allowed.' }
|
||||
}
|
||||
|
||||
if (application.settings.previews) {
|
||||
if (application.destinationDockerId) {
|
||||
const { found: isRunning } = await checkContainer(
|
||||
{
|
||||
dockerId: application.destinationDocker.id,
|
||||
container: application.id
|
||||
}
|
||||
);
|
||||
if (!isRunning) {
|
||||
throw { status: 500, message: 'Application not running.' }
|
||||
}
|
||||
}
|
||||
if (
|
||||
pullmergeRequestAction === 'opened' ||
|
||||
pullmergeRequestAction === 'reopened' ||
|
||||
pullmergeRequestAction === 'synchronize'
|
||||
) {
|
||||
|
||||
await prisma.application.update({
|
||||
where: { id: application.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
let previewApplicationId = undefined
|
||||
if (pullmergeRequestId) {
|
||||
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
|
||||
if (foundPreviewApplications.length > 0) {
|
||||
previewApplicationId = foundPreviewApplications[0].id
|
||||
} else {
|
||||
const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://'
|
||||
const previewApplication = await prisma.previewApplication.create({
|
||||
data: {
|
||||
pullmergeRequestId,
|
||||
sourceBranch,
|
||||
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
|
||||
application: { connect: { id: application.id } }
|
||||
}
|
||||
})
|
||||
previewApplicationId = previewApplication.id
|
||||
}
|
||||
}
|
||||
// if (application.connectedDatabase && pullmergeRequestAction === 'opened' || pullmergeRequestAction === 'reopened') {
|
||||
// // Coolify hosted database
|
||||
// if (application.connectedDatabase.databaseId) {
|
||||
// const databaseId = application.connectedDatabase.databaseId;
|
||||
// const database = await prisma.database.findUnique({ where: { id: databaseId } });
|
||||
// if (database) {
|
||||
// await createdBranchDatabase(database, application.connectedDatabase.hostedDatabaseDBName, pullmergeRequestId);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
pullmergeRequestId,
|
||||
previewApplicationId,
|
||||
sourceBranch,
|
||||
applicationId: application.id,
|
||||
destinationDockerId: application.destinationDocker.id,
|
||||
gitSourceId: application.gitSource.id,
|
||||
githubAppId: application.gitSource.githubApp?.id,
|
||||
gitlabAppId: application.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_pr'
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
message: 'Queued. Thank you!'
|
||||
};
|
||||
} else if (pullmergeRequestAction === 'closed') {
|
||||
if (application.destinationDockerId) {
|
||||
const id = `${application.id}-${pullmergeRequestId}`;
|
||||
try {
|
||||
await removeContainer({ id, dockerId: application.destinationDocker.id });
|
||||
} catch (error) { }
|
||||
}
|
||||
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
|
||||
if (foundPreviewApplications.length > 0) {
|
||||
for (const preview of foundPreviewApplications) {
|
||||
await prisma.previewApplication.delete({ where: { id: preview.id } })
|
||||
}
|
||||
}
|
||||
return {
|
||||
message: 'PR closed. Thank you!'
|
||||
};
|
||||
// if (application?.connectedDatabase?.databaseId) {
|
||||
// const databaseId = application.connectedDatabase.databaseId;
|
||||
// const database = await prisma.database.findUnique({ where: { id: databaseId } });
|
||||
// if (database) {
|
||||
// await removeBranchDatabase(database, pullmergeRequestId);
|
||||
// }
|
||||
// }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
throw { status: 500, message: 'Not handled event.' }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
|
||||
@@ -2,9 +2,8 @@ import axios from "axios";
|
||||
import cuid from "cuid";
|
||||
import crypto from "crypto";
|
||||
import type { FastifyReply, FastifyRequest } from "fastify";
|
||||
import { errorHandler, getAPIUrl, isDev, listSettings, prisma } from "../../../lib/common";
|
||||
import { errorHandler, getAPIUrl, getDomain, getUIUrl, isDev, listSettings, prisma } from "../../../lib/common";
|
||||
import { checkContainer, removeContainer } from "../../../lib/docker";
|
||||
import { scheduler } from "../../../lib/scheduler";
|
||||
import { getApplicationFromDB, getApplicationFromDBWebhook } from "../../api/v1/applications/handlers";
|
||||
|
||||
import type { ConfigureGitLabApp, GitLabEvents } from "./types";
|
||||
@@ -30,7 +29,7 @@ export async function configureGitLabApp(request: FastifyRequest<ConfigureGitLab
|
||||
});
|
||||
const { data } = await axios.post(`${htmlUrl}/oauth/token`, params)
|
||||
if (isDev) {
|
||||
return reply.redirect(`${getAPIUrl()}/webhooks/success?token=${data.access_token}`)
|
||||
return reply.redirect(`${getUIUrl()}/webhooks/success?token=${data.access_token}`)
|
||||
}
|
||||
return reply.redirect(`/webhooks/success?token=${data.access_token}`)
|
||||
} catch ({ status, message, ...other }) {
|
||||
@@ -40,63 +39,60 @@ export async function configureGitLabApp(request: FastifyRequest<ConfigureGitLab
|
||||
export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
|
||||
const { object_kind: objectKind, ref, project_id } = request.body
|
||||
try {
|
||||
const buildId = cuid();
|
||||
|
||||
const allowedActions = ['opened', 'reopen', 'close', 'open', 'update'];
|
||||
|
||||
const webhookToken = request.headers['x-gitlab-token'];
|
||||
if (!webhookToken) {
|
||||
if (!webhookToken && !isDev) {
|
||||
throw { status: 500, message: 'Invalid webhookToken.' }
|
||||
}
|
||||
if (objectKind === 'push') {
|
||||
const projectId = Number(project_id);
|
||||
const branch = ref.split('/')[2];
|
||||
const applicationFound = await getApplicationFromDBWebhook(projectId, branch);
|
||||
if (applicationFound) {
|
||||
if (!applicationFound.configHash) {
|
||||
const configHash = crypto
|
||||
.createHash('sha256')
|
||||
.update(
|
||||
JSON.stringify({
|
||||
buildPack: applicationFound.buildPack,
|
||||
port: applicationFound.port,
|
||||
exposePort: applicationFound.exposePort,
|
||||
installCommand: applicationFound.installCommand,
|
||||
buildCommand: applicationFound.buildCommand,
|
||||
startCommand: applicationFound.startCommand
|
||||
})
|
||||
)
|
||||
.digest('hex');
|
||||
await prisma.application.updateMany({
|
||||
where: { branch, projectId },
|
||||
data: { configHash }
|
||||
const applicationsFound = await getApplicationFromDBWebhook(projectId, branch);
|
||||
if (applicationsFound && applicationsFound.length > 0) {
|
||||
for (const application of applicationsFound) {
|
||||
const buildId = cuid();
|
||||
if (!application.configHash) {
|
||||
const configHash = crypto
|
||||
.createHash('sha256')
|
||||
.update(
|
||||
JSON.stringify({
|
||||
buildPack: application.buildPack,
|
||||
port: application.port,
|
||||
exposePort: application.exposePort,
|
||||
installCommand: application.installCommand,
|
||||
buildCommand: application.buildCommand,
|
||||
startCommand: application.startCommand
|
||||
})
|
||||
)
|
||||
.digest('hex');
|
||||
await prisma.application.update({
|
||||
where: { id: application.id },
|
||||
data: { configHash }
|
||||
});
|
||||
}
|
||||
await prisma.application.update({
|
||||
where: { id: application.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: application.id,
|
||||
destinationDockerId: application.destinationDocker.id,
|
||||
gitSourceId: application.gitSource.id,
|
||||
githubAppId: application.gitSource.githubApp?.id,
|
||||
gitlabAppId: application.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_commit'
|
||||
}
|
||||
});
|
||||
}
|
||||
await prisma.application.update({
|
||||
where: { id: applicationFound.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
applicationId: applicationFound.id,
|
||||
destinationDockerId: applicationFound.destinationDocker.id,
|
||||
gitSourceId: applicationFound.gitSource.id,
|
||||
githubAppId: applicationFound.gitSource.githubApp?.id,
|
||||
gitlabAppId: applicationFound.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_commit'
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
message: 'Queued. Thank you!'
|
||||
};
|
||||
|
||||
}
|
||||
} else if (objectKind === 'merge_request') {
|
||||
const { object_attributes: { work_in_progress: isDraft, action, source_branch: sourceBranch, target_branch: targetBranch, iid: pullmergeRequestId }, project: { id } } = request.body
|
||||
|
||||
const { object_attributes: { work_in_progress: isDraft, action, source_branch: sourceBranch, target_branch: targetBranch }, project: { id } } = request.body
|
||||
const pullmergeRequestId = request.body.object_attributes.iid.toString();
|
||||
const projectId = Number(id);
|
||||
if (!allowedActions.includes(action)) {
|
||||
throw { status: 500, message: 'Action not allowed.' }
|
||||
@@ -105,64 +101,93 @@ export async function gitLabEvents(request: FastifyRequest<GitLabEvents>) {
|
||||
throw { status: 500, message: 'Draft MR, do nothing.' }
|
||||
}
|
||||
|
||||
const applicationFound = await getApplicationFromDBWebhook(projectId, targetBranch);
|
||||
if (applicationFound) {
|
||||
if (applicationFound.settings.previews) {
|
||||
if (applicationFound.destinationDockerId) {
|
||||
const isRunning = await checkContainer(
|
||||
{
|
||||
dockerId: applicationFound.destinationDocker.id,
|
||||
container: applicationFound.id
|
||||
const applicationsFound = await getApplicationFromDBWebhook(projectId, targetBranch);
|
||||
if (applicationsFound && applicationsFound.length > 0) {
|
||||
for (const application of applicationsFound) {
|
||||
const buildId = cuid();
|
||||
if (application.settings.previews) {
|
||||
if (application.destinationDockerId) {
|
||||
const { found: isRunning } = await checkContainer(
|
||||
{
|
||||
dockerId: application.destinationDocker.id,
|
||||
container: application.id
|
||||
}
|
||||
);
|
||||
if (!isRunning) {
|
||||
throw { status: 500, message: 'Application not running.' }
|
||||
}
|
||||
);
|
||||
if (!isRunning) {
|
||||
throw { status: 500, message: 'Application not running.' }
|
||||
}
|
||||
}
|
||||
if (!isDev && applicationFound.gitSource.gitlabApp.webhookToken !== webhookToken) {
|
||||
throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!' }
|
||||
}
|
||||
if (
|
||||
action === 'opened' ||
|
||||
action === 'reopen' ||
|
||||
action === 'open' ||
|
||||
action === 'update'
|
||||
) {
|
||||
await prisma.application.update({
|
||||
where: { id: applicationFound.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
pullmergeRequestId,
|
||||
sourceBranch,
|
||||
applicationId: applicationFound.id,
|
||||
destinationDockerId: applicationFound.destinationDocker.id,
|
||||
gitSourceId: applicationFound.gitSource.id,
|
||||
githubAppId: applicationFound.gitSource.githubApp?.id,
|
||||
gitlabAppId: applicationFound.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_mr'
|
||||
if (!isDev && application.gitSource.gitlabApp.webhookToken !== webhookToken) {
|
||||
throw { status: 500, message: 'Invalid webhookToken. Are you doing something nasty?!' }
|
||||
}
|
||||
if (
|
||||
action === 'opened' ||
|
||||
action === 'reopen' ||
|
||||
action === 'open' ||
|
||||
action === 'update'
|
||||
) {
|
||||
await prisma.application.update({
|
||||
where: { id: application.id },
|
||||
data: { updatedAt: new Date() }
|
||||
});
|
||||
let previewApplicationId = undefined
|
||||
if (pullmergeRequestId) {
|
||||
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
|
||||
if (foundPreviewApplications.length > 0) {
|
||||
previewApplicationId = foundPreviewApplications[0].id
|
||||
} else {
|
||||
const protocol = application.fqdn.includes('https://') ? 'https://' : 'http://'
|
||||
const previewApplication = await prisma.previewApplication.create({
|
||||
data: {
|
||||
pullmergeRequestId,
|
||||
sourceBranch,
|
||||
customDomain: `${protocol}${pullmergeRequestId}.${getDomain(application.fqdn)}`,
|
||||
application: { connect: { id: application.id } }
|
||||
}
|
||||
})
|
||||
previewApplicationId = previewApplication.id
|
||||
}
|
||||
}
|
||||
});
|
||||
return {
|
||||
message: 'Queued. Thank you!'
|
||||
};
|
||||
} else if (action === 'close') {
|
||||
if (applicationFound.destinationDockerId) {
|
||||
const id = `${applicationFound.id}-${pullmergeRequestId}`;
|
||||
await removeContainer({ id, dockerId: applicationFound.destinationDocker.id });
|
||||
await prisma.build.create({
|
||||
data: {
|
||||
id: buildId,
|
||||
pullmergeRequestId,
|
||||
previewApplicationId,
|
||||
sourceBranch,
|
||||
applicationId: application.id,
|
||||
destinationDockerId: application.destinationDocker.id,
|
||||
gitSourceId: application.gitSource.id,
|
||||
githubAppId: application.gitSource.githubApp?.id,
|
||||
gitlabAppId: application.gitSource.gitlabApp?.id,
|
||||
status: 'queued',
|
||||
type: 'webhook_mr'
|
||||
}
|
||||
});
|
||||
return {
|
||||
message: 'Queued. Thank you!'
|
||||
};
|
||||
} else if (action === 'close') {
|
||||
if (application.destinationDockerId) {
|
||||
const id = `${application.id}-${pullmergeRequestId}`;
|
||||
try {
|
||||
await removeContainer({ id, dockerId: application.destinationDocker.id });
|
||||
} catch (error) { }
|
||||
}
|
||||
const foundPreviewApplications = await prisma.previewApplication.findMany({ where: { applicationId: application.id, pullmergeRequestId } })
|
||||
if (foundPreviewApplications.length > 0) {
|
||||
for (const preview of foundPreviewApplications) {
|
||||
await prisma.previewApplication.delete({ where: { id: preview.id } })
|
||||
}
|
||||
}
|
||||
return {
|
||||
message: 'MR closed. Thank you!'
|
||||
};
|
||||
|
||||
}
|
||||
return {
|
||||
message: 'Removed preview. Thank you!'
|
||||
};
|
||||
}
|
||||
}
|
||||
throw { status: 500, message: 'Merge request previews are not enabled.' }
|
||||
}
|
||||
}
|
||||
throw { status: 500, message: 'Not handled event.' }
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
|
||||
@@ -3,15 +3,16 @@ import { errorHandler, getDomain, isDev, prisma, executeDockerCmd } from "../../
|
||||
import { supportedServiceTypesAndVersions } from "../../../lib/services/supportedVersions";
|
||||
import { includeServices } from "../../../lib/services/common";
|
||||
import { TraefikOtherConfiguration } from "./types";
|
||||
import { OnlyId } from "../../../types";
|
||||
|
||||
function configureMiddleware(
|
||||
{ id, container, port, domain, nakedDomain, isHttps, isWWW, isDualCerts, scriptName, type },
|
||||
{ id, container, port, domain, nakedDomain, isHttps, isWWW, isDualCerts, scriptName, type, isCustomSSL },
|
||||
traefik
|
||||
) {
|
||||
if (isHttps) {
|
||||
traefik.http.routers[id] = {
|
||||
entrypoints: ['web'],
|
||||
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
|
||||
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
middlewares: ['redirect-to-https']
|
||||
};
|
||||
@@ -25,13 +26,36 @@ function configureMiddleware(
|
||||
]
|
||||
}
|
||||
};
|
||||
if (type === 'appwrite') {
|
||||
traefik.http.routers[`${id}-realtime`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/v1/realtime\`)`,
|
||||
service: `${`${id}-realtime`}`,
|
||||
tls: {
|
||||
domains: {
|
||||
main: `${domain}`
|
||||
}
|
||||
},
|
||||
middlewares: []
|
||||
};
|
||||
|
||||
|
||||
traefik.http.services[`${id}-realtime`] = {
|
||||
loadbalancer: {
|
||||
servers: [
|
||||
{
|
||||
url: `http://${container}-realtime:${port}`
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
}
|
||||
if (isDualCerts) {
|
||||
traefik.http.routers[`${id}-secure`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
|
||||
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
tls: {
|
||||
tls: isCustomSSL ? true : {
|
||||
certresolver: 'letsencrypt'
|
||||
},
|
||||
middlewares: []
|
||||
@@ -40,16 +64,16 @@ function configureMiddleware(
|
||||
if (isWWW) {
|
||||
traefik.http.routers[`${id}-secure-www`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`www.${nakedDomain}\`)`,
|
||||
rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
tls: {
|
||||
tls: isCustomSSL ? true : {
|
||||
certresolver: 'letsencrypt'
|
||||
},
|
||||
middlewares: []
|
||||
};
|
||||
traefik.http.routers[`${id}-secure`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`${nakedDomain}\`)`,
|
||||
rule: `Host(\`${nakedDomain}\`) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
tls: {
|
||||
domains: {
|
||||
@@ -62,7 +86,7 @@ function configureMiddleware(
|
||||
} else {
|
||||
traefik.http.routers[`${id}-secure-www`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`www.${nakedDomain}\`)`,
|
||||
rule: `Host(\`www.${nakedDomain}\`) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
tls: {
|
||||
domains: {
|
||||
@@ -73,9 +97,9 @@ function configureMiddleware(
|
||||
};
|
||||
traefik.http.routers[`${id}-secure`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`${domain}\`)`,
|
||||
rule: `Host(\`${domain}\`) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
tls: {
|
||||
tls: isCustomSSL ? true : {
|
||||
certresolver: 'letsencrypt'
|
||||
},
|
||||
middlewares: []
|
||||
@@ -86,14 +110,14 @@ function configureMiddleware(
|
||||
} else {
|
||||
traefik.http.routers[id] = {
|
||||
entrypoints: ['web'],
|
||||
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
|
||||
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
middlewares: []
|
||||
};
|
||||
|
||||
traefik.http.routers[`${id}-secure`] = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)`,
|
||||
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/\`)`,
|
||||
service: `${id}`,
|
||||
tls: {
|
||||
domains: {
|
||||
@@ -112,6 +136,23 @@ function configureMiddleware(
|
||||
]
|
||||
}
|
||||
};
|
||||
if (type === 'appwrite') {
|
||||
traefik.http.routers[`${id}-realtime`] = {
|
||||
entrypoints: ['web'],
|
||||
rule: `(Host(\`${nakedDomain}\`) || Host(\`www.${nakedDomain}\`)) && PathPrefix(\`/v1/realtime\`)`,
|
||||
service: `${id}-realtime`,
|
||||
middlewares: []
|
||||
};
|
||||
traefik.http.services[`${id}-realtime`] = {
|
||||
loadbalancer: {
|
||||
servers: [
|
||||
{
|
||||
url: `http://${container}-realtime:${port}`
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if (!isDualCerts) {
|
||||
if (isWWW) {
|
||||
@@ -137,7 +178,19 @@ function configureMiddleware(
|
||||
|
||||
export async function traefikConfiguration(request, reply) {
|
||||
try {
|
||||
const sslpath = '/etc/traefik/acme/custom';
|
||||
const certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } } } } })
|
||||
let parsedCertificates = []
|
||||
for (const certificate of certificates) {
|
||||
parsedCertificates.push({
|
||||
certFile: `${sslpath}/${certificate.id}-cert.pem`,
|
||||
keyFile: `${sslpath}/${certificate.id}-key.pem`
|
||||
})
|
||||
}
|
||||
const traefik = {
|
||||
tls: {
|
||||
certificates: parsedCertificates
|
||||
},
|
||||
http: {
|
||||
routers: {},
|
||||
services: {},
|
||||
@@ -183,7 +236,7 @@ export async function traefikConfiguration(request, reply) {
|
||||
port,
|
||||
destinationDocker,
|
||||
destinationDockerId,
|
||||
settings: { previews, dualCerts }
|
||||
settings: { previews, dualCerts, isCustomSSL }
|
||||
} = application;
|
||||
if (destinationDockerId) {
|
||||
const { network, id: dockerId } = destinationDocker;
|
||||
@@ -203,7 +256,8 @@ export async function traefikConfiguration(request, reply) {
|
||||
isRunning,
|
||||
isHttps,
|
||||
isWWW,
|
||||
isDualCerts: dualCerts
|
||||
isDualCerts: dualCerts,
|
||||
isCustomSSL
|
||||
});
|
||||
}
|
||||
if (previews) {
|
||||
@@ -226,7 +280,8 @@ export async function traefikConfiguration(request, reply) {
|
||||
nakedDomain,
|
||||
isHttps,
|
||||
isWWW,
|
||||
isDualCerts: dualCerts
|
||||
isDualCerts: dualCerts,
|
||||
isCustomSSL
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -490,10 +545,22 @@ export async function traefikOtherConfiguration(request: FastifyRequest<TraefikO
|
||||
}
|
||||
}
|
||||
|
||||
export async function remoteTraefikConfiguration(request: FastifyRequest) {
|
||||
export async function remoteTraefikConfiguration(request: FastifyRequest<OnlyId>) {
|
||||
const { id } = request.params
|
||||
try {
|
||||
const sslpath = '/etc/traefik/acme/custom';
|
||||
const certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true } } } } })
|
||||
let parsedCertificates = []
|
||||
for (const certificate of certificates) {
|
||||
parsedCertificates.push({
|
||||
certFile: `${sslpath}/${certificate.id}-cert.pem`,
|
||||
keyFile: `${sslpath}/${certificate.id}-key.pem`
|
||||
})
|
||||
}
|
||||
const traefik = {
|
||||
tls: {
|
||||
certificates: parsedCertificates
|
||||
},
|
||||
http: {
|
||||
routers: {},
|
||||
services: {},
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { FastifyPluginAsync } from 'fastify';
|
||||
import { OnlyId } from '../../../types';
|
||||
import { remoteTraefikConfiguration, traefikConfiguration, traefikOtherConfiguration } from './handlers';
|
||||
import { TraefikOtherConfiguration } from './types';
|
||||
|
||||
@@ -6,7 +7,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
|
||||
fastify.get('/main.json', async (request, reply) => traefikConfiguration(request, reply));
|
||||
fastify.get<TraefikOtherConfiguration>('/other.json', async (request, reply) => traefikOtherConfiguration(request));
|
||||
|
||||
fastify.get('/remote/:id', async (request) => remoteTraefikConfiguration(request));
|
||||
fastify.get<OnlyId>('/remote/:id', async (request) => remoteTraefikConfiguration(request));
|
||||
};
|
||||
|
||||
export default root;
|
||||
|
||||
@@ -1,39 +1,4 @@
|
||||
export interface OnlyId {
|
||||
Params: { id: string },
|
||||
}
|
||||
export interface SaveVersion extends OnlyId {
|
||||
Body: {
|
||||
version: string
|
||||
}
|
||||
}
|
||||
export interface SaveDatabaseDestination extends OnlyId {
|
||||
Body: {
|
||||
destinationId: string
|
||||
}
|
||||
}
|
||||
export interface GetDatabaseLogs extends OnlyId {
|
||||
Querystring: {
|
||||
since: number
|
||||
}
|
||||
}
|
||||
export interface SaveDatabase extends OnlyId {
|
||||
Body: {
|
||||
name: string,
|
||||
defaultDatabase: string,
|
||||
dbUser: string,
|
||||
dbUserPassword: string,
|
||||
rootUser: string,
|
||||
rootUserPassword: string,
|
||||
version: string,
|
||||
isRunning: boolean
|
||||
}
|
||||
}
|
||||
export interface SaveDatabaseSettings extends OnlyId {
|
||||
Body: {
|
||||
isPublic: boolean,
|
||||
appendOnly: boolean
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
4
apps/i18n/.env.example
Normal file
4
apps/i18n/.env.example
Normal file
@@ -0,0 +1,4 @@
|
||||
WEBLATE_INSTANCE_URL=http://localhost
|
||||
WEBLATE_COMPONENT_NAME=coolify
|
||||
WEBLATE_TOKEN=
|
||||
TRANSLATION_DIR=
|
||||
1
apps/i18n/.gitignore
vendored
Normal file
1
apps/i18n/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
locales/*
|
||||
63
apps/i18n/index.mjs
Normal file
63
apps/i18n/index.mjs
Normal file
@@ -0,0 +1,63 @@
|
||||
import dotenv from 'dotenv';
|
||||
dotenv.config()
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { fileURLToPath } from 'url';
|
||||
import Gettext from 'node-gettext'
|
||||
import { po } from 'gettext-parser'
|
||||
import got from 'got';
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
const weblateInstanceURL = process.env.WEBLATE_INSTANCE_URL;
|
||||
const weblateComponentName = process.env.WEBLATE_COMPONENT_NAME
|
||||
const token = process.env.WEBLATE_TOKEN;
|
||||
|
||||
const translationsDir = process.env.TRANSLATION_DIR;
|
||||
const translationsPODir = './locales';
|
||||
const locales = []
|
||||
const domain = 'locale'
|
||||
|
||||
const translations = await got(`${weblateInstanceURL}/api/components/${weblateComponentName}/glossary/translations/?format=json`, {
|
||||
headers: {
|
||||
"Authorization": `Token ${token}`
|
||||
}
|
||||
}).json()
|
||||
for (const translation of translations.results) {
|
||||
const code = translation.language_code
|
||||
locales.push(code)
|
||||
|
||||
const fileUrl = translation.file_url.replace('=json', '=po')
|
||||
const file = await got(fileUrl, {
|
||||
headers: {
|
||||
"Authorization": `Token ${token}`
|
||||
}
|
||||
}).text()
|
||||
fs.writeFileSync(path.join(__dirname, translationsPODir, domain + '-' + code + '.po'), file)
|
||||
}
|
||||
|
||||
|
||||
const gt = new Gettext()
|
||||
|
||||
locales.forEach((locale) => {
|
||||
let json = {}
|
||||
const fileName = `${domain}-${locale}.po`
|
||||
const translationsFilePath = path.join(translationsPODir, fileName)
|
||||
const translationsContent = fs.readFileSync(translationsFilePath)
|
||||
|
||||
const parsedTranslations = po.parse(translationsContent)
|
||||
const a = gt.gettext(parsedTranslations)
|
||||
for (const [key, value] of Object.entries(a)) {
|
||||
if (key === 'translations') {
|
||||
for (const [key1, value1] of Object.entries(value)) {
|
||||
if (key1 !== '') {
|
||||
for (const [key2, value2] of Object.entries(value1)) {
|
||||
json[value2.msgctxt] = value2.msgstr[0]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
fs.writeFileSync(`${translationsDir}/${locale}.json`, JSON.stringify(json))
|
||||
})
|
||||
15
apps/i18n/package.json
Normal file
15
apps/i18n/package.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "i18n-converter",
|
||||
"description": "Convert Weblate translations to sveltekit-i18n",
|
||||
"license": "Apache-2.0",
|
||||
"scripts": {
|
||||
"translate": "node index.mjs"
|
||||
},
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"node-gettext": "3.0.0",
|
||||
"gettext-parser": "6.0.0",
|
||||
"got": "12.3.1",
|
||||
"dotenv": "16.0.2"
|
||||
}
|
||||
}
|
||||
@@ -14,35 +14,42 @@
|
||||
"format": "prettier --write --plugin-search-dir=. ."
|
||||
},
|
||||
"devDependencies": {
|
||||
"@floating-ui/dom": "1.0.1",
|
||||
"@playwright/test": "1.25.1",
|
||||
"@popperjs/core": "2.11.6",
|
||||
"@sveltejs/kit": "1.0.0-next.405",
|
||||
"@types/js-cookie": "3.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "5.35.1",
|
||||
"@typescript-eslint/parser": "5.35.1",
|
||||
"@typescript-eslint/eslint-plugin": "5.36.1",
|
||||
"@typescript-eslint/parser": "5.36.1",
|
||||
"autoprefixer": "10.4.8",
|
||||
"eslint": "8.22.0",
|
||||
"classnames": "2.3.1",
|
||||
"eslint": "8.23.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"eslint-plugin-svelte3": "4.0.0",
|
||||
"flowbite": "1.5.2",
|
||||
"flowbite-svelte": "0.26.2",
|
||||
"postcss": "8.4.16",
|
||||
"prettier": "2.7.1",
|
||||
"prettier-plugin-svelte": "2.7.0",
|
||||
"svelte": "3.49.0",
|
||||
"svelte-check": "2.8.1",
|
||||
"svelte": "3.50.0",
|
||||
"svelte-check": "2.9.0",
|
||||
"svelte-preprocess": "4.10.7",
|
||||
"tailwindcss": "3.1.8",
|
||||
"tailwindcss-scrollbar": "0.1.0",
|
||||
"tslib": "2.4.0",
|
||||
"typescript": "4.7.4",
|
||||
"vite": "3.0.5"
|
||||
"typescript": "4.8.2",
|
||||
"vite": "3.1.0"
|
||||
},
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@sveltejs/adapter-static": "1.0.0-next.39",
|
||||
"@tailwindcss/typography": "^0.5.4",
|
||||
"@tailwindcss/typography": "^0.5.7",
|
||||
"cuid": "2.1.8",
|
||||
"daisyui": "2.24.0",
|
||||
"daisyui": "2.24.2",
|
||||
"dayjs": "1.11.5",
|
||||
"js-cookie": "3.0.1",
|
||||
"p-limit": "4.0.0",
|
||||
"svelte-file-dropzone": "^1.0.0",
|
||||
"svelte-select": "4.4.7",
|
||||
"sveltekit-i18n": "2.2.2"
|
||||
}
|
||||
|
||||
@@ -3,33 +3,35 @@ import Cookies from 'js-cookie';
|
||||
|
||||
export function getAPIUrl() {
|
||||
if (GITPOD_WORKSPACE_URL) {
|
||||
const { href } = new URL(GITPOD_WORKSPACE_URL)
|
||||
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '')
|
||||
return newURL
|
||||
const { href } = new URL(GITPOD_WORKSPACE_URL);
|
||||
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '');
|
||||
return newURL;
|
||||
}
|
||||
if (CODESANDBOX_HOST) {
|
||||
return `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}`
|
||||
return `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
|
||||
}
|
||||
return dev ? 'http://localhost:3001' : 'http://localhost:3000';
|
||||
return dev
|
||||
? 'http://localhost:3001'
|
||||
: 'http://localhost:3000';
|
||||
}
|
||||
export function getWebhookUrl(type: string) {
|
||||
if (GITPOD_WORKSPACE_URL) {
|
||||
const { href } = new URL(GITPOD_WORKSPACE_URL)
|
||||
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '')
|
||||
const { href } = new URL(GITPOD_WORKSPACE_URL);
|
||||
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '');
|
||||
if (type === 'github') {
|
||||
return `${newURL}/webhooks/github/events`
|
||||
return `${newURL}/webhooks/github/events`;
|
||||
}
|
||||
if (type === 'gitlab') {
|
||||
return `${newURL}/webhooks/gitlab/events`
|
||||
return `${newURL}/webhooks/gitlab/events`;
|
||||
}
|
||||
}
|
||||
if (CODESANDBOX_HOST) {
|
||||
const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/,'3001')}`
|
||||
const newURL = `https://${CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
|
||||
if (type === 'github') {
|
||||
return `${newURL}/webhooks/github/events`
|
||||
return `${newURL}/webhooks/github/events`;
|
||||
}
|
||||
if (type === 'gitlab') {
|
||||
return `${newURL}/webhooks/gitlab/events`
|
||||
return `${newURL}/webhooks/gitlab/events`;
|
||||
}
|
||||
}
|
||||
return `https://webhook.site/0e5beb2c-4e9b-40e2-a89e-32295e570c21/events`;
|
||||
@@ -37,7 +39,7 @@ export function getWebhookUrl(type: string) {
|
||||
async function send({
|
||||
method,
|
||||
path,
|
||||
data = {},
|
||||
data = null,
|
||||
headers,
|
||||
timeout = 120000
|
||||
}: {
|
||||
@@ -51,7 +53,7 @@ async function send({
|
||||
const controller = new AbortController();
|
||||
const id = setTimeout(() => controller.abort(), timeout);
|
||||
const opts: any = { method, headers: {}, body: null, signal: controller.signal };
|
||||
if (Object.keys(data).length > 0) {
|
||||
if (data && Object.keys(data).length > 0) {
|
||||
const parsedData = data;
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
if (value === '') {
|
||||
@@ -83,7 +85,9 @@ async function send({
|
||||
if (dev && !path.startsWith('https://')) {
|
||||
path = `${getAPIUrl()}${path}`;
|
||||
}
|
||||
|
||||
if (method === 'POST' && data && !opts.body) {
|
||||
opts.body = data;
|
||||
}
|
||||
const response = await fetch(`${path}`, opts);
|
||||
|
||||
clearTimeout(id);
|
||||
@@ -103,7 +107,11 @@ async function send({
|
||||
return {};
|
||||
}
|
||||
if (!response.ok) {
|
||||
if (response.status === 401 && !path.startsWith('https://api.github') && !path.includes('/v4/user')) {
|
||||
if (
|
||||
response.status === 401 &&
|
||||
!path.startsWith('https://api.github') &&
|
||||
!path.includes('/v4/user')
|
||||
) {
|
||||
Cookies.remove('token');
|
||||
}
|
||||
|
||||
@@ -126,7 +134,7 @@ export function del(
|
||||
|
||||
export function post(
|
||||
path: string,
|
||||
data: Record<string, unknown>,
|
||||
data: Record<string, unknown> | FormData,
|
||||
headers?: Record<string, unknown>
|
||||
): Promise<Record<string, any>> {
|
||||
return send({ method: 'POST', path, data, headers });
|
||||
|
||||
@@ -3,7 +3,7 @@ import { addToast } from '$lib/store';
|
||||
export const asyncSleep = (delay: number) =>
|
||||
new Promise((resolve) => setTimeout(resolve, delay));
|
||||
|
||||
export function errorNotification(error: any): void {
|
||||
export function errorNotification(error: any | { message: string }): void {
|
||||
if (error.message) {
|
||||
if (error.message === 'Cannot read properties of undefined (reading \'postMessage\')') {
|
||||
return addToast({
|
||||
@@ -83,4 +83,8 @@ export function handlerNotFoundLoad(error: any, url: URL) {
|
||||
status: 500,
|
||||
error: new Error(`Could not load ${url}`)
|
||||
};
|
||||
}
|
||||
|
||||
export function getRndInteger(min: number, max: number) {
|
||||
return Math.floor(Math.random() * (max - min + 1)) + min;
|
||||
}
|
||||
1
apps/ui/src/lib/components/Beta.svelte
Normal file
1
apps/ui/src/lib/components/Beta.svelte
Normal file
@@ -0,0 +1 @@
|
||||
<span class="badge bg-coollabs-gradient rounded text-white font-normal"> BETA </span>
|
||||
@@ -13,8 +13,9 @@
|
||||
export let id: string;
|
||||
export let name: string;
|
||||
export let placeholder = '';
|
||||
export let inputStyle = '';
|
||||
|
||||
let disabledClass = 'bg-coolback disabled:bg-coolblack';
|
||||
let disabledClass = 'bg-coolback disabled:bg-coolblack w-full';
|
||||
let isHttps = browser && window.location.protocol === 'https:';
|
||||
|
||||
function copyToClipboard() {
|
||||
@@ -32,6 +33,7 @@
|
||||
{#if !isPasswordField || showPassword}
|
||||
{#if textarea}
|
||||
<textarea
|
||||
style={inputStyle}
|
||||
rows="5"
|
||||
class={disabledClass}
|
||||
class:pr-10={true}
|
||||
@@ -47,6 +49,7 @@
|
||||
>
|
||||
{:else}
|
||||
<input
|
||||
style={inputStyle}
|
||||
class={disabledClass}
|
||||
type="text"
|
||||
class:pr-10={true}
|
||||
@@ -63,6 +66,7 @@
|
||||
{/if}
|
||||
{:else}
|
||||
<input
|
||||
style={inputStyle}
|
||||
class={disabledClass}
|
||||
class:pr-10={true}
|
||||
class:pr-20={value && isHttps}
|
||||
@@ -78,7 +82,7 @@
|
||||
/>
|
||||
{/if}
|
||||
|
||||
<div class="absolute top-0 right-0 m-3 cursor-pointer text-stone-600 hover:text-white">
|
||||
<div class="absolute top-0 right-0 flex justify-center items-center h-full cursor-pointer text-stone-600 hover:text-white mr-3">
|
||||
<div class="flex space-x-2">
|
||||
{#if isPasswordField}
|
||||
<div on:click={() => (showPassword = !showPassword)}>
|
||||
|
||||
19
apps/ui/src/lib/components/DocLink.svelte
Normal file
19
apps/ui/src/lib/components/DocLink.svelte
Normal file
@@ -0,0 +1,19 @@
|
||||
<script lang="ts">
|
||||
import Tooltip from './Tooltip.svelte';
|
||||
export let url = 'https://docs.coollabs.io';
|
||||
let id =
|
||||
'cool-' +
|
||||
url
|
||||
.split('')
|
||||
.map((c) => c.charCodeAt(0).toString(16).padStart(2, '0'))
|
||||
.join('')
|
||||
.slice(-16);
|
||||
</script>
|
||||
|
||||
<a {id} href={url} target="_blank" class="icons inline-block cursor-pointer text-xs mx-2">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" d="M9.879 7.519c1.171-1.025 3.071-1.025 4.242 0 1.172 1.025 1.172 2.687 0 3.712-.203.179-.43.326-.67.442-.745.361-1.45.999-1.45 1.827v.75M21 12a9 9 0 11-18 0 9 9 0 0118 0zm-9 5.25h.008v.008H12v-.008z" />
|
||||
</svg>
|
||||
|
||||
</a>
|
||||
<Tooltip triggeredBy={`#${id}`}>See details in the documentation</Tooltip>
|
||||
@@ -1,6 +1,43 @@
<script lang="ts">
export let text: string;
export let customClass = 'max-w-[24rem]';
// import { onMount } from 'svelte';

// import Tooltip from './Tooltip.svelte';
export let explanation = '';
export let position = 'dropdown-right'
// let id: any;
// let self: any;
// onMount(() => {
// id = `info-${self.offsetLeft}-${self.offsetTop}`;
// });
</script>

<div class="p-2 text-xs text-stone-400 {customClass}">{@html text}</div>
<div class={`dropdown dropdown-end ${position}`}>
<label tabindex="0" class="btn btn-circle btn-ghost btn-xs text-sky-500">
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" class="w-4 h-4 stroke-current"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"></path></svg>
</label>
<div tabindex="0" class="card compact dropdown-content shadow bg-coolgray-400 rounded w-64">
<div class="card-body">
<!-- <h2 class="card-title">You needed more info?</h2> -->
<p class="text-xs font-normal">{@html explanation}</p>
</div>
</div>
</div>
<!-- <div {id} class="inline-block mx-2 cursor-pointer" bind:this={self}>
<svg
fill="none"
height="14"
shape-rendering="geometricPrecision"
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="1.4"
viewBox="0 0 24 24"
width="14"
><path d="M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z" /><path
d="M9.09 9a3 3 0 015.83 1c0 2-3 3-3 3"
/><circle cx="12" cy="17" r=".5" />
</svg>
</div>
{#if id}
<Tooltip triggeredBy={`#${id}`}>{@html explanation}</Tooltip>
{/if} -->
@@ -1,29 +1,36 @@
<script lang="ts">
import Explainer from '$lib/components/Explainer.svelte';
import Beta from './Beta.svelte';
import Explaner from './Explainer.svelte';
import Tooltip from './Tooltip.svelte';

export let id: any;
export let customClass: any = null;
export let setting: any;
export let title: any;
export let isBeta: any = false;
export let description: any;
export let isCenter = true;
export let disabled = false;
export let dataTooltip: any = null;
export let loading = false;
let triggeredBy = `#${id}`;
</script>

<div class="flex items-center py-4 pr-8">
<div class="flex w-96 flex-col">
<div class="text-xs font-bold text-stone-100 md:text-base">{title}</div>
<Explainer text={description} />
<!-- svelte-ignore a11y-label-has-associated-control -->
<label>
{title}
{#if isBeta}
<Beta />
{/if}
{#if description && description !== ''}
<Explaner explanation={description} />
{/if}
</label>
</div>
</div>
<div
class:tooltip-right={dataTooltip}
class:tooltip-primary={dataTooltip}
class:tooltip={dataTooltip}
class:text-center={isCenter}
data-tip={dataTooltip}
class="flex justify-center"
>
<div class:text-center={isCenter} class={`flex justify-center ${customClass}`}>
<div
on:click
aria-pressed="false"
@@ -32,6 +39,7 @@
class:bg-green-600={!loading && setting}
class:bg-stone-700={!loading && !setting}
class:bg-yellow-500={loading}
{id}
>
<span class="sr-only">Use setting</span>
<span
@@ -72,3 +80,7 @@
</span>
</div>
</div>

{#if dataTooltip}
<Tooltip {triggeredBy} placement="top">{dataTooltip}</Tooltip>
{/if}
apps/ui/src/lib/components/SimpleExplainer.svelte (new file, +6)
@@ -0,0 +1,6 @@
<script lang="ts">
export let text: string;
export let customClass = 'max-w-[24rem]';
</script>

<div class="p-2 text-xs text-stone-400 {customClass}">{@html text}</div>
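A usage sketch for SimpleExplainer (not part of the diff; the import path is an assumption). It renders the given text, which may contain HTML, as muted helper copy:

<script lang="ts">
	// assumed import path, for illustration only
	import SimpleExplainer from '$lib/components/SimpleExplainer.svelte';
</script>

<SimpleExplainer text="Changes take effect after a <b>redeploy</b>." />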
@@ -2,6 +2,11 @@
import { createEventDispatcher } from 'svelte';
const dispatch = createEventDispatcher();
export let type = 'info';
function success() {
if (type === 'success') {
return 'bg-coollabs';
}
}
</script>

<div
@@ -10,8 +15,7 @@
on:focus={() => dispatch('pause')}
on:mouseout={() => dispatch('resume')}
on:blur={() => dispatch('resume')}
class="alert shadow-lg text-white rounded hover:scale-105 transition-all duration-100 cursor-pointer"
class:bg-coollabs={type === 'success'}
class={`flex flex-row alert shadow-lg text-white hover:scale-105 transition-all duration-100 cursor-pointer rounded ${success()}`}
class:alert-error={type === 'error'}
class:alert-info={type === 'info'}
>
@@ -1,5 +1,4 @@
<script lang="ts">
import { fade } from 'svelte/transition';
import Toast from './Toast.svelte';

import { dismissToast, pauseToast, resumeToast, toasts } from '$lib/store';
@@ -7,9 +6,9 @@

{#if $toasts}
<section>
<article class="toast toast-top toast-end rounded-none" role="alert" transition:fade>
<article class="toast toast-top toast-end rounded-none px-10" role="alert" >
{#each $toasts as toast (toast.id)}
<Toast
<Toast
type={toast.type}
on:resume={() => resumeToast(toast.id)}
on:pause={() => pauseToast(toast.id)}
apps/ui/src/lib/components/Tooltip.svelte (new file, +8)
@@ -0,0 +1,8 @@
<script lang="ts">
import { Tooltip } from 'flowbite-svelte';
export let placement = 'bottom';
export let color = 'bg-coollabs font-thin text-left';
export let triggeredBy = '#tooltip-default';
</script>

<Tooltip {triggeredBy} {placement} arrow={false} {color} style="custom"><slot /></Tooltip>
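A usage sketch for the Tooltip wrapper (not part of the diff; the import path is an assumption). It wraps flowbite-svelte's Tooltip, so triggeredBy must be a CSS selector matching the id of an element that already exists on the page:

<script lang="ts">
	// assumed import path, for illustration only
	import Tooltip from '$lib/components/Tooltip.svelte';
</script>

<button id="save-settings" class="btn btn-sm">Save</button>
<Tooltip triggeredBy="#save-settings" placement="top">Saves the current settings</Tooltip>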
@@ -1,11 +1,11 @@
<script lang="ts">
import { dev } from '$app/env';
import { get, post } from '$lib/api';
import { addToast, appSession, features } from '$lib/store';
import { addToast, appSession, features, updateLoading, isUpdateAvailable } from '$lib/store';
import { asyncSleep, errorNotification } from '$lib/common';
import { onMount } from 'svelte';
import Tooltip from './Tooltip.svelte';

let isUpdateAvailable = false;
let updateStatus: any = {
found: false,
loading: false,
@@ -16,7 +16,6 @@
updateStatus.loading = true;
try {
if (dev) {
console.log(`updating to ${latestVersion}`);
await asyncSleep(4000);
return window.location.reload();
} else {
@@ -58,37 +57,41 @@
if ($appSession.userId) {
const overrideVersion = $features.latestVersion;
if ($appSession.teamId === '0') {
if ($updateLoading === true) return;
try {
$updateLoading = true;
const data = await get(`/update`);
if (overrideVersion || data?.isUpdateAvailable) {
latestVersion = overrideVersion || data.latestVersion;
if (overrideVersion) {
isUpdateAvailable = true;
$isUpdateAvailable = true;
} else {
isUpdateAvailable = data.isUpdateAvailable;
$isUpdateAvailable = data.isUpdateAvailable;
}
}
} catch (error) {
return errorNotification(error);
} finally {
$updateLoading = false;
}
}
}
});
</script>

<div class="flex flex-col space-y-4 py-2">
<div class="py-0 lg:py-2">
{#if $appSession.teamId === '0'}
{#if isUpdateAvailable}
{#if $isUpdateAvailable}
<button
id="update"
disabled={updateStatus.success === false}
on:click={update}
class="icons tooltip tooltip-right tooltip-primary bg-gradient-to-r from-purple-500 via-pink-500 to-red-500 text-white duration-75 hover:scale-105"
data-tip="Update Available!"
class="icons bg-coollabs-gradient text-white duration-75 hover:scale-105 w-full"
>
{#if updateStatus.loading}
<svg
xmlns="http://www.w3.org/2000/svg"
class="lds-heart h-9 w-8"
class="lds-heart h-8 w-8"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
@@ -102,24 +105,27 @@
/>
</svg>
{:else if updateStatus.success === null}
<svg
xmlns="http://www.w3.org/2000/svg"
class="h-9 w-8"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
fill="none"
stroke-linecap="round"
stroke-linejoin="round"
>
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
<circle cx="12" cy="12" r="9" />
<line x1="12" y1="8" x2="8" y2="12" />
<line x1="12" y1="8" x2="12" y2="16" />
<line x1="16" y1="12" x2="12" y2="8" />
</svg>
<div class="flex items-center justify-center space-x-2">
<svg
xmlns="http://www.w3.org/2000/svg"
class="h-8 w-8"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
fill="none"
stroke-linecap="round"
stroke-linejoin="round"
>
<path stroke="none" d="M0 0h24v24H0z" fill="none" />
<circle cx="12" cy="12" r="9" />
<line x1="12" y1="8" x2="8" y2="12" />
<line x1="12" y1="8" x2="12" y2="16" />
<line x1="16" y1="12" x2="12" y2="8" />
</svg>
<span class="flex lg:hidden">Update available</span>
</div>
{:else if updateStatus.success}
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-9 w-8"
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="h-8 w-8"
><path
fill="#DD2E44"
d="M11.626 7.488c-.112.112-.197.247-.268.395l-.008-.008L.134 33.141l.011.011c-.208.403.14 1.223.853 1.937.713.713 1.533 1.061 1.936.853l.01.01L28.21 24.735l-.008-.009c.147-.07.282-.155.395-.269 1.562-1.562-.971-6.627-5.656-11.313-4.687-4.686-9.752-7.218-11.315-5.656z"
@@ -184,6 +190,9 @@
>
{/if}
</button>
<Tooltip triggeredBy="#update" placement="right" color="bg-coolgray-200 text-white"
>New Version Available!</Tooltip
>
{/if}
{/if}
</div>
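The hunks above move the update state from a component-local variable into shared stores imported from $lib/store. A minimal sketch of how those stores could be declared, assuming plain Svelte writables (the store module itself is not part of this excerpt, so the defaults are assumptions):

// $lib/store (sketch only; names taken from the import above)
import { writable } from 'svelte/store';

export const updateLoading = writable(false);
export const isUpdateAvailable = writable(false);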
apps/ui/src/lib/components/Upload.svelte (new file, +20)
@@ -0,0 +1,20 @@
<script lang="ts">
import { post } from '$lib/api';
let cert: any;
let key: any;
async function submitForm() {
const formData = new FormData();
formData.append('cert', cert[0]);
formData.append('key', key[0]);
await post('/upload', formData);
}
</script>

<form on:submit|preventDefault={submitForm}>
<label for="cert">Certificate</label>
<input id="cert" type="file" required name="cert" bind:files={cert} />
<label for="key">Private Key</label>
<input id="key" type="file" required name="key" bind:files={key} />
<br />
<input type="submit" />
</form>
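A usage sketch for the new Upload component (not part of the diff; the import path is an assumption). bind:files yields a FileList, so cert[0] and key[0] are the selected File objects appended to the FormData posted to /upload:

<script lang="ts">
	// assumed import path, for illustration only
	import Upload from '$lib/components/Upload.svelte';
</script>

<Upload />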
@@ -1,4 +1,5 @@
<script lang="ts">
export let server: any;
let usage = {
cpu: {
load: [0, 0, 0],
@@ -20,39 +21,21 @@
let usageInterval: any;
let loading = {
usage: false,
cleanup: false,
restart: false
cleanup: false
};
import { addToast, appSession } from '$lib/store';
import { onDestroy, onMount } from 'svelte';
import { get, post } from '$lib/api';
import { errorNotification } from '$lib/common';
import Beta from './Beta.svelte';
async function getStatus() {
if (loading.usage) return;
loading.usage = true;
const data = await get('/usage');
const data = await get(`/servers/usage/${server.id}?remoteEngine=${server.remoteEngine}`);
usage = data.usage;
loading.usage = false;
}
async function restartCoolify() {
const sure = confirm(
'Are you sure you would like to restart Coolify? Currently running deployments will be stopped and restarted.'
);
if (sure) {
loading.restart = true;
try {
await post(`/internal/restart`, {});
addToast({
type: 'success',
message: 'Coolify restarted successfully. It will take a moment.'
});
} catch (error) {
return errorNotification(error);
} finally {
loading.restart = false;
}
}
}

onDestroy(() => {
clearInterval(usageInterval);
});
@@ -71,7 +54,7 @@
async function manuallyCleanupStorage() {
try {
loading.cleanup = true;
await post('/internal/cleanup', {});
await post('/internal/cleanup', { serverId: server.id });
return addToast({
message: 'Cleanup done.',
type: 'success'
@@ -84,45 +67,63 @@
}
</script>

<div class="w-full">
<div class="flex lg:flex-row flex-col gap-4">
<h1 class="title lg:text-3xl">Hardware Details</h1>
<div class="flex lg:flex-row flex-col space-x-0 lg:space-x-2 space-y-2 lg:space-y-0">
{#if $appSession.teamId === '0'}
<button
on:click={manuallyCleanupStorage}
class:loading={loading.cleanup}
class="btn btn-sm">Cleanup Storage</button
>
<button
on:click={restartCoolify}
class:loading={loading.restart}
class="btn btn-sm bg-red-600 hover:bg-red-500">Restart Coolify</button
>
{/if}
<div class="w-full relative p-5 ">
{#if loading.usage}
<span class="indicator-item badge bg-yellow-500 badge-sm" />
{:else}
<span class="indicator-item badge bg-success badge-sm" />
{/if}

<div class="w-full flex flex-col lg:flex-row space-y-4 lg:space-y-0 space-x-4">
<div class="flex flex-col">
<h1 class="font-bold text-lg lg:text-xl truncate">
{server.name}
{#if server.remoteEngine}
<Beta />
{/if}
</h1>
<div class="text-xs">
{#if server?.remoteIpAddress}
<h2>{server?.remoteIpAddress}</h2>
{:else}
<h2>localhost</h2>
{/if}
</div>
</div>
{#if $appSession.teamId === '0'}
<button
disabled={loading.cleanup}
on:click={manuallyCleanupStorage}
class:loading={loading.cleanup}
class:bg-coollabs={!loading.cleanup}
class="btn btn-sm">Cleanup Storage</button
>
{/if}
</div>
<div class="flex lg:flex-row flex-col gap-4">
<div class="flex lg:flex-row flex-col space-x-0 lg:space-x-2 space-y-2 lg:space-y-0" />
</div>
<div class="divider" />
<div class="grid grid-flow-col gap-4 grid-rows-3 justify-start lg:justify-center lg:grid-rows-1">
<div class="stats stats-vertical min-w-[16rem] mb-5 rounded bg-transparent">
<div class="stat">
<div class="stat-title">Total Memory</div>
<div class="stat-value text-2xl">
{(usage?.memory.totalMemMb).toFixed(0)}<span class="text-sm">MB</span>
<div class="stat-value text-2xl text-white">
{(usage?.memory?.totalMemMb).toFixed(0)}<span class="text-sm">MB</span>
</div>
</div>

<div class="stat">
<div class="stat-title">Used Memory</div>
<div class="stat-value text-2xl">
{(usage?.memory.usedMemMb).toFixed(0)}<span class="text-sm">MB</span>
<div class="stat-value text-2xl text-white">
{(usage?.memory?.usedMemMb).toFixed(0)}<span class="text-sm">MB</span>
</div>
</div>

<div class="stat">
<div class="stat-title">Free Memory</div>
<div class="stat-value text-2xl">
{usage?.memory.freeMemPercentage}<span class="text-sm">%</span>
<div class="stat-value text-2xl text-white">
{(usage?.memory?.freeMemPercentage).toFixed(0)}<span class="text-sm">%</span>
</div>
</div>
</div>
@@ -130,42 +131,42 @@
<div class="stats stats-vertical min-w-[20rem] mb-5 bg-transparent rounded">
<div class="stat">
<div class="stat-title">Total CPU</div>
<div class="stat-value text-2xl">
{usage?.cpu.count}
<div class="stat-value text-2xl text-white">
{usage?.cpu?.count}
</div>
</div>

<div class="stat">
<div class="stat-title">CPU Usage</div>
<div class="stat-value text-2xl">
{usage?.cpu.usage}<span class="text-sm">%</span>
<div class="stat-value text-2xl text-white">
{usage?.cpu?.usage}<span class="text-sm">%</span>
</div>
</div>

<div class="stat">
<div class="stat-title">Load Average (5,10,30mins)</div>
<div class="stat-value text-2xl">{usage?.cpu.load}</div>
<div class="stat-value text-2xl text-white">{usage?.cpu?.load}</div>
</div>
</div>
<div class="stats stats-vertical min-w-[16rem] mb-5 bg-transparent rounded">
<div class="stat">
<div class="stat-title">Total Disk</div>
<div class="stat-value text-2xl">
{usage?.disk.totalGb}<span class="text-sm">GB</span>
<div class="stat-value text-2xl text-white">
{usage?.disk?.totalGb}<span class="text-sm">GB</span>
</div>
</div>

<div class="stat">
<div class="stat-title">Used Disk</div>
<div class="stat-value text-2xl">
{usage?.disk.usedGb}<span class="text-sm">GB</span>
<div class="stat-value text-2xl text-white">
{usage?.disk?.usedGb}<span class="text-sm">GB</span>
</div>
</div>

<div class="stat">
<div class="stat-title">Free Disk</div>
<div class="stat-value text-2xl">
{usage?.disk.freePercentage}<span class="text-sm">%</span>
<div class="stat-value text-2xl text-white">
{usage?.disk?.freePercentage}<span class="text-sm">%</span>
</div>
</div>
</div>
@@ -16,4 +16,6 @@
<Icons.Redis {isAbsolute} />
{:else if type === 'couchdb'}
<Icons.CouchDB {isAbsolute} />
{:else if type === 'edgedb'}
<Icons.EdgeDB {isAbsolute} />
{/if}
apps/ui/src/lib/components/svg/databases/EdgeDB.svelte (new file, +22)
@@ -0,0 +1,22 @@
<script lang="ts">
export let isAbsolute = false;
</script>

<svg
class={isAbsolute ? 'absolute top-0 left-0 -m-8 h-16 w-16' : 'mx-auto w-12 h-12'}
width="88"
fill="#1F8AED"
height="101"
viewBox="0 -15 88 101"
><path
class="pageNav_logoBar__2v4ah"
style="transform-origin:center 35.5px"
fill-rule="evenodd"
clip-rule="evenodd"
d="M55.1436 71H58.1436V0H55.1436V71Z"
/><path
fill-rule="evenodd"
clip-rule="evenodd"
d="M74.5362 35.3047C74.5362 41.3776 72.1013 42.4662 69.3799 42.4662H63.5935V28.1432H69.3799C72.1013 28.1432 74.5362 29.2318 74.5362 35.3047V35.3047ZM71.5862 35.3047C71.5862 31.0651 70.2971 30.8646 68.4352 30.8646H66.6305V39.7448H68.4352C70.2971 39.7448 71.5862 39.5443 71.5862 35.3047V35.3047ZM40.9348 42.4662V28.1432H50.0442V30.8646H43.9713V33.7865H48.5546V36.4792H43.9713V39.7448H50.0442V42.4662H40.9348ZM80.6092 36.1068V39.7448H83.13C84.7055 39.7448 85.1066 38.7135 85.1066 37.9401C85.1066 37.3385 84.8201 36.1068 82.6717 36.1068H80.6092ZM80.6092 30.8646V33.5859H82.6717C83.8462 33.5859 84.5337 33.0703 84.5337 32.2109C84.5337 31.3516 83.8462 30.8646 82.6717 30.8646H80.6092ZM77.5732 28.1432H83.4169C86.482 28.1432 87.3987 30.2917 87.3987 31.8385C87.3987 33.2708 86.482 34.3021 85.8518 34.5885C87.6851 35.4766 88.0002 37.2813 88.0002 38.1979C88.0002 39.401 87.3987 42.4662 83.4169 42.4662H77.5732V28.1432ZM23.4899 35.3047C23.4899 41.3776 21.055 42.4662 18.3337 42.4662H12.5472V28.1432H18.3337C21.055 28.1432 23.4899 29.2318 23.4899 35.3047V35.3047ZM32.4272 39.8594C33.974 39.8594 34.7761 39.3438 35.0626 39V37.4245H32.599V34.9609H37.4975V40.6615C37.0678 41.3203 34.7188 42.6094 32.5704 42.6094C29.047 42.6094 26.0678 41.2344 26.0678 35.1615C26.0678 29.0885 29.0756 28 31.797 28C36.0652 28 37.1251 30.2344 37.4688 32.2109L34.948 32.7839C34.8048 31.8672 34.0027 30.7214 32.1694 30.7214C30.3074 30.7214 29.0183 30.9219 29.0183 35.1615C29.0183 39.401 30.3647 39.8594 32.4272 39.8594V39.8594ZM20.539 35.3047C20.539 31.0651 19.2499 30.8646 17.3879 30.8646H15.5833V39.7448H17.3879C19.2499 39.7448 20.539 39.5443 20.539 35.3047V35.3047ZM0 42.4662V28.1432H9.10938V30.8646H3.03646V33.7865H7.61979V36.4792H3.03646V39.7448H9.10938V42.4662H0Z"
/></svg
>
@@ -9,15 +9,8 @@
viewBox="0 0 309.88 252.72"
class={isAbsolute ? 'absolute top-0 left-0 -m-5 h-12 w-12 ' : 'mx-auto w-8 h-8'}
>
<defs>
<style>
.cls-1 {
fill: #fff;
}
</style>
</defs>
<path
class="cls-1"
fill="#fff"
d="M316,10.05a4.2,4.2,0,0,0-2.84-1c-2.84,0-6.5,1.92-8.46,3l-.79.4a26.81,26.81,0,0,1-10.57,2.66c-3.76.12-7,.34-11.22.77-25,2.58-36.15,21.74-46.89,40.27-5.84,10.08-11.88,20.5-20.16,28.57a55.71,55.71,0,0,1-5.46,4.63c-8.57,6.39-19.33,10.9-27.74,14.12-8.07,3.08-16.86,5.85-25.37,8.53-7.78,2.45-15.14,4.76-21.9,7.28-3.05,1.13-5.64,2-7.93,2.76-6.15,2-10.6,3.53-17.08,8-2.53,1.73-5.07,3.6-6.8,5a71.26,71.26,0,0,0-13.54,14.27A84.81,84.81,0,0,1,77.88,163c-1.36,1.34-3.8,2-7.43,2-4.27,0-9.43-.88-14.91-1.81s-11.46-2-16.46-2c-4.07,0-7.17.66-9.5,2,0,0-3.9,2.28-5.56,5.23l1.62.73a33.56,33.56,0,0,1,6.93,5,33.68,33.68,0,0,0,7.19,5.12A6.37,6.37,0,0,1,42,180.72c-.69,1-1.69,2.29-2.74,3.67-5.77,7.55-9.13,12.32-7.2,14.92a6,6,0,0,0,3,.68c12.59,0,19.34-3.27,27.9-7.41,2.47-1.2,5-2.44,8-3.7,5-2.17,10.38-5.63,16.08-9.29,7.55-4.85,15.36-9.87,22.92-12.3a62.3,62.3,0,0,1,19.23-2.7c8,0,16.42,1.07,24.54,2.11,6.06.78,12.32,1.58,18.47,2,2.39.14,4.6.21,6.76.21a78.48,78.48,0,0,0,8.61-.45l.68-.24c4.32-2.65,6.34-8.34,8.29-13.84,1.26-3.54,2.32-6.72,4-8.74a2.06,2.06,0,0,1,.33-.27.4.4,0,0,1,.49.08.25.25,0,0,1,0,.16c-1,21.51-9.67,35.16-18.42,47.3L177,199.14s8.18,0,12.84-1.8c17-5.08,29.84-16.28,39.18-34.14a144.39,144.39,0,0,0,6.16-14.09c.16-.4,1.64-1.14,1.49.93,0,.61-.08,1.29-.13,2h0c0,.42-.06.85-.08,1.28-.25,3-1,9.34-1,9.34l5.25-2.81c12.66-8,22.42-24.14,29.82-49.25,3.09-10.46,5.34-20.85,7.33-30,2.38-11,4.43-20.43,6.78-24.09,3.69-5.74,9.32-9.62,14.77-13.39.75-.51,1.49-1,2.22-1.54,6.86-4.81,13.67-10.36,15.16-20.71l0-.23C317.93,12.92,317,11,316,10.05Z"
transform="translate(-7.45 -9.1)"
/>
@@ -6,5 +6,6 @@ export { default as MongoDB } from './MongoDB.svelte';
export { default as MySQL } from './MySQL.svelte';
export { default as PostgreSQL } from './PostgreSQL.svelte';
export { default as Redis } from './Redis.svelte';
export { default as EdgeDB } from './EdgeDB.svelte';

@@ -5,7 +5,7 @@
<svg
viewBox="0 0 700 240"
xmlns="http://www.w3.org/2000/svg"
class={isAbsolute ? 'w-36 absolute top-0 left-0 -m-3 -mt-5' : 'w-28 h-28 mx-auto'}
class={isAbsolute ? 'w-36 absolute top-0 left-0 -m-3 -mt-5' : 'w-full h-10 mx-auto'}
><path fill="#FDBC3D" d="m90.694 107.498-.981.39-20.608 8.23 6.332 6.547z" /><path
fill="#8EC63F"
d="M61.139 77.914 46.632 93 56.9 103.547c8.649-7.169 17.832-10.502 18.653-10.789L61.139 77.914z"
apps/ui/src/lib/components/svg/services/Grafana.svelte (new file, +9)
@@ -0,0 +1,9 @@
<script lang="ts">
export let isAbsolute = false;
</script>

<img
alt="grafana logo"
class={isAbsolute ? 'w-12 h-12 absolute top-0 left-0 -m-3 -mt-5' : 'w-8 h-8 mx-auto'}
src="/grafana.png"
/>
@@ -4,7 +4,7 @@

<svg
viewBox="0 0 127 74"
class={isAbsolute ? 'w-10 h-10 absolute top-0 left-0 -m-5' : 'w-8 h-8mx-auto'}
class={isAbsolute ? 'w-10 h-10 absolute top-0 left-0 -m-5' : 'w-8 h-8 mx-auto'}
xmlns="http://www.w3.org/2000/svg"
><path
d="M.825 73.993l23.244-59.47A21.85 21.85 0 0144.42.625h14.014L35.19 60.096a21.85 21.85 0 01-20.352 13.897H.825z"
@@ -40,4 +40,10 @@
<Icons.GlitchTip {isAbsolute} />
{:else if type === 'searxng'}
<Icons.Searxng {isAbsolute} />
{:else if type === 'weblate'}
<Icons.Weblate {isAbsolute} />
{:else if type === 'grafana'}
<Icons.Grafana {isAbsolute} />
{:else if type === 'trilium'}
<Icons.Trilium {isAbsolute} />
{/if}
apps/ui/src/lib/components/svg/services/Trilium.svelte (new file, +9)
@@ -0,0 +1,9 @@
<script lang="ts">
export let isAbsolute = false;
</script>

<img
alt="trilium logo"
class={isAbsolute ? 'w-9 h-9 absolute top-3 left-0 -m-3 -mt-5' : 'w-8 h-8 mx-auto'}
src="/trilium.png"
/>
apps/ui/src/lib/components/svg/services/Weblate.svelte (new file, +61)
@@ -0,0 +1,61 @@
<script lang="ts">
export let isAbsolute = false;
</script>

<svg
xmlns="http://www.w3.org/2000/svg"
class={isAbsolute ? 'w-16 h-16 absolute top-0 left-0 -m-7' : 'w-10 h-10 mx-auto'}
version="1.1"
viewBox="0 0 300 300"
><linearGradient
id="a"
x1=".3965"
x2="98.808"
y1="55.253"
y2="55.253"
gradientTransform="scale(.98308 1.0172)"
gradientUnits="userSpaceOnUse"
><stop stop-color="#00d2e6" offset="0" /><stop
stop-color="#2eccaa"
offset="1"
/></linearGradient
><linearGradient
id="b"
x1="49.017"
x2="99.793"
y1="137.89"
y2="113.96"
gradientTransform="scale(1.1631 .8598)"
gradientUnits="userSpaceOnUse"
><stop stop-opacity="0" offset="0" /><stop offset=".51413" /><stop
stop-opacity="0"
offset="1"
/></linearGradient
><linearGradient
id="c"
x1="201.82"
x2="103.58"
y1="57.649"
y2="57.649"
gradientTransform="scale(.98308 1.0172)"
gradientUnits="userSpaceOnUse"
><stop stop-color="#1fa385" offset="0" /><stop
stop-color="#2eccaa"
offset="1"
/></linearGradient
><g transform="translate(50,76)" fill-rule="evenodd"
><path
d="m127.25 111.61c-2.8884-0.0145-5.7666-0.6024-8.4797-1.7847-6.1117-2.6626-11.493-7.6912-15.872-14.495 1.2486-2.2193 2.3738-4.5173 3.3784-6.8535 4.4051-10.243 6.5-21.46 6.6607-32.593-0.0233-0.22082-0.0416-0.44244-0.0552-0.66483l-0.0121-0.57132c-0.01-4.3654-0.67459-8.7898-2.1767-12.909-1.7304-4.7458-4.4887-9.4955-8.865-11.348-0.79519-0.33595-1.6316-0.47701-2.4642-0.45737-5.5049-10.289-5.6799-20.149 0-29.537 0.10115 0 0.20619 3.9293e-4 0.30734 0.001179 6.7012 0.07387 13.34 2.1418 19.021 5.7536 15.469 9.835 23.182 29.001 23.352 47.818 2e-3 0.22083-3.9e-4 0.44126-7e-3 0.66169h0.0868c-0.0226 19.887-4.8049 40.054-14.875 56.979zm-34.3 31.216c-14.448 5.9425-31.228 5.6236-45.549-1.025-16.476-7.6476-29.065-22.512-36.818-39.479-13.262-29.022-13.566-63.715-0.98815-93.182 9.4458 3.7788 17.845-2.2397 17.845-2.2397s-0.01945 9.2605 8.9478 13.905c-9.2007 21.556-8.979 47.167 0.2412 68.173 4.4389 10.107 11.22 19.519 20.619 24.842 3.3547 1.8996 7.041 3.126 10.833 3.5862 0.01404 0.0219 0.02808 0.0439 0.04214 0.0658 6.6965 10.449 15.132 19.157 24.828 25.354z"
fill="url(#a)"
fill-rule="nonzero"
/><path
d="m127.24 111.61c-2.8869-0.0151-5.7636-0.60296-8.4755-1.7846-6.1127-2.663-11.495-7.6928-15.874-14.498 1.2494-2.2205 2.3754-4.5198 3.3806-6.8572 1.3282-3.0884 2.4463-6.2648 3.3644-9.501 2.128-7.4978 30.382 2.0181 26.072 14.371-2.2239 6.373-5.0394 12.509-8.4675 18.27zm-34.302 31.212c-14.446 5.9396-31.224 5.6198-45.543-1.0278-16.476-7.6476 0.44739-33.303 9.8465-27.981 3.3533 1.8988 7.0378 3.125 10.828 3.5856 0.01567 0.0245 0.03135 0.049 0.04704 0.0735 6.695 10.447 15.128 19.153 24.821 25.349z"
fill="url(#b)"
opacity=".3"
/><path
d="m56.762 54.628c-0.0066-0.22043-0.0093-0.44086-7e-3 -0.66169 0.17001-18.817 7.8827-37.983 23.352-47.818 5.6811-3.6118 12.32-5.6798 19.021-5.7536 0.10115-7.8585e-4 0.20619-0.001179 0.30734-0.001179v29.537c-0.83254-0.01965-1.669 0.12141-2.4642 0.45737-4.3763 1.8523-7.1345 6.602-8.865 11.348-1.5021 4.1191-2.1669 8.5434-2.1767 12.909l-0.01206 0.57132c-0.01362 0.2224-0.0319 0.44401-0.05524 0.66483 0.16067 11.134 2.2556 22.35 6.6607 32.593 4.9334 11.472 12.775 22.025 23.847 26.849 8.3526 3.6397 17.612 2.7811 25.182-1.5057 9.3991-5.3226 16.18-14.734 20.619-24.842 9.2202-21.006 9.4419-46.617 0.24121-68.173 8.9673-4.6444 8.9478-13.905 8.9478-13.905s8.3993 6.0185 17.845 2.2397c12.578 29.466 12.274 64.16-0.98815 93.182-7.7535 16.967-20.343 31.831-36.818 39.479-14.667 6.809-31.913 6.9792-46.591 0.58389-13.19-5.7489-23.918-16.106-31.637-28.15-11.179-17.443-16.472-38.678-16.496-59.604z"
fill="url(#c)"
fill-rule="nonzero"
/></g
></svg
>
Some files were not shown because too many files have changed in this diff.