Compare commits

...

113 Commits

Author SHA1 Message Date
Andras Bacsai
6311627899 Merge pull request #882 from coollabsio/next
v3.12.18
2023-01-24 15:09:51 +01:00
Andras Bacsai
37cea5fb61 update mattermost + traefik 2023-01-24 14:48:03 +01:00
Andras Bacsai
655a8cd60d feat: able to use $$ in traefik config gen
fix: repman icon
2023-01-24 13:52:39 +01:00
Andras Bacsai
4c8babc96a version++ 2023-01-23 10:45:19 +01:00
Andras Bacsai
612bacebed fix: cleanupStuckedContainers 2023-01-23 10:37:28 +01:00
Andras Bacsai
ade7c8566d fix: cleanupStuckedContainers 2023-01-23 10:37:14 +01:00
Andras Bacsai
19553ce5c8 Merge pull request #874 from coollabsio/next
v3.12.17
2023-01-20 21:14:06 +01:00
Andras Bacsai
18ed2527e8 fix 2023-01-20 20:51:37 +01:00
Andras Bacsai
b0652bc884 Merge pull request #872 from coollabsio/next
Next
2023-01-20 14:07:46 +01:00
Andras Bacsai
15c9ad23fe fix: stucked containers 2023-01-20 14:06:55 +01:00
Andras Bacsai
578bb12562 test new release gh action 2023-01-20 14:05:07 +01:00
Andras Bacsai
f82cfda07f version++ 2023-01-20 13:55:05 +01:00
Andras Bacsai
9e52b2788d Pocketbase GH release updated 2023-01-20 13:49:39 +01:00
Andras Bacsai
2e56a113d9 Test GH release thing 2023-01-20 13:48:57 +01:00
Andras Bacsai
4722d777e6 Merge pull request #871 from coollabsio/next
v3.12.15
2023-01-20 13:22:11 +01:00
Andras Bacsai
2141d54ae0 fix 2023-01-20 13:15:52 +01:00
Andras Bacsai
e346225136 fix 2023-01-20 13:10:40 +01:00
Andras Bacsai
012d4dae56 testing 2023-01-20 11:15:38 +01:00
Andras Bacsai
b4d9fe70af fix 2023-01-20 10:43:21 +01:00
Andras Bacsai
85e83b5441 Test new gh actions 2023-01-20 10:42:13 +01:00
Andras Bacsai
6b2a453b8f fix: deletion + cleanupStuckedContainers 2023-01-20 10:10:36 +01:00
Andras Bacsai
27021538d8 fix: cleanup stucked containers 2023-01-20 09:40:29 +01:00
Andras Bacsai
8b57a2b055 fix: cleanup function 2023-01-20 09:26:48 +01:00
Andras Bacsai
75dd894685 Merge pull request #867 from coollabsio/next
v3.12.14
2023-01-19 14:36:05 +01:00
Andras Bacsai
9101ef8774 version++ 2023-01-19 14:33:33 +01:00
Andras Bacsai
5932540630 fix: www redirect 2023-01-19 14:33:20 +01:00
Andras Bacsai
ec376b2e47 Merge pull request #864 from coollabsio/next
v3.12.13
2023-01-18 19:00:03 +01:00
Andras Bacsai
a176562ad0 fix: secrets 2023-01-18 18:51:03 +01:00
Andras Bacsai
becf37b676 Merge pull request #858 from coollabsio/next
v3.12.12
2023-01-17 12:33:51 +01:00
Andras Bacsai
9b5efab8f8 fix: grpc 2023-01-17 11:51:53 +01:00
Andras Bacsai
e98a8ba599 traefik dashbord in dev 2023-01-17 11:12:52 +01:00
Andras Bacsai
7ddac50008 feat: http + h2c paralel 2023-01-17 11:12:42 +01:00
Andras Bacsai
9837ae359f feat: init h2c (http2/grpc) support 2023-01-17 10:35:04 +01:00
Andras Bacsai
710a829dcb version++ 2023-01-17 10:00:50 +01:00
Andras Bacsai
ccd84fa454 fix: build args docker compose 2023-01-17 10:00:27 +01:00
Andras Bacsai
335b36d3a9 Merge pull request #857 from zek/patch-2
Fix docker-compose build args
2023-01-17 09:26:12 +01:00
Talha Zekeriya Durmuş
2be30fae00 Handle string build parameter 2023-01-17 02:11:06 +01:00
Talha Zekeriya Durmuş
db5cd21884 Fix docker-compose build args 2023-01-17 02:04:01 +01:00
Andras Bacsai
bfd3020031 Merge pull request #853 from coollabsio/next
v3.12.11
2023-01-16 12:44:04 +01:00
Andras Bacsai
344c36997a fix: public gh repo reload compose 2023-01-16 12:36:59 +01:00
Andras Bacsai
dfd9272b70 version ++ 2023-01-16 12:17:48 +01:00
Andras Bacsai
359f4520f5 test template + tags during dev 2023-01-16 11:45:45 +01:00
Andras Bacsai
aecf014f4e Merge pull request #843 from zek/soketi-logo
Add Soketi Logo
2023-01-16 11:10:48 +01:00
Andras Bacsai
d2a89ddf84 Merge pull request #844 from zek/repman-logo
Add Repman logo
2023-01-16 11:10:35 +01:00
Andras Bacsai
c01fe153ae Merge pull request #847 from zek/mattermost
Add Matermost Logo
2023-01-16 11:10:14 +01:00
Andras Bacsai
4f4a838799 update templates+tags 2023-01-16 10:58:44 +01:00
Andras Bacsai
ac6f2567eb fix: build env variables with docker compose 2023-01-16 10:42:23 +01:00
Andras Bacsai
05a5816ac6 fix: do not cleanup compose applications as unconfigured 2023-01-16 10:22:14 +01:00
Andras Bacsai
9c8f6e9195 fix: delete apps with previews 2023-01-16 10:16:49 +01:00
Andras Bacsai
2fd001f6d2 fix: docker log sequence 2023-01-16 10:06:41 +01:00
Andras Bacsai
d641d32413 fix: compose file location 2023-01-16 09:48:15 +01:00
Andras Bacsai
18064ef6a2 fixes related to docker-compose 2023-01-16 09:44:08 +01:00
Andras Bacsai
5cb9216add wip: trpc 2023-01-13 15:50:20 +01:00
Andras Bacsai
91c36dc810 wip: trpc 2023-01-13 15:24:43 +01:00
Andras Bacsai
6efb02fa32 wip: trpc 2023-01-13 15:21:54 +01:00
Andras Bacsai
97313e4180 wip: trpc 2023-01-13 14:54:21 +01:00
Andras Bacsai
568ab24fd9 wip: trpc 2023-01-13 14:17:36 +01:00
Talha Zekeriya Durmuş
5a745efcd3 Add Matermost Logo 2023-01-13 02:38:33 +01:00
Andras Bacsai
c651570e62 wip: trpc 2023-01-12 16:50:17 +01:00
Andras Bacsai
8980598085 wip: trpc 2023-01-12 16:43:41 +01:00
Talha Zekeriya Durmuş
c07c742feb Add Repman logo 2023-01-12 01:45:13 +01:00
Talha Zekeriya Durmuş
1053abb9a9 Add Soketi Logo 2023-01-12 01:41:35 +01:00
Andras Bacsai
2c9e57cbb1 Merge pull request #841 from coollabsio/next
v3.12.10
2023-01-11 11:44:22 +01:00
Andras Bacsai
c6eaa2c8a6 update packages in api 2023-01-11 11:35:57 +01:00
Andras Bacsai
5ab5e913ee Merge pull request #840 from zek/patch-1
Fix: add missing variables
2023-01-11 11:33:17 +01:00
Talha Zekeriya Durmuş
cea53ca476 Fix: add missing variables 2023-01-11 11:12:44 +01:00
Andras Bacsai
58af09114b Merge pull request #834 from coollabsio/next
v3.12.9
2023-01-11 11:00:56 +01:00
Andras Bacsai
c4c0417e2d new pocketbase 2023-01-11 10:55:55 +01:00
Andras Bacsai
74f90e6947 Merge pull request #838 from zek/patch-1
Add Build Time Secrets for Laravel
2023-01-11 10:12:59 +01:00
Andras Bacsai
ad5c339780 fix 2023-01-11 10:11:32 +01:00
Andras Bacsai
305823db00 fix: secrets 2023-01-11 09:29:59 +01:00
Talha Zekeriya Durmuş
baf58b298f Add Build Time Secrets 2023-01-11 01:43:43 +01:00
Andras Bacsai
c37367d018 add directus 2023-01-10 15:30:10 +01:00
Andras Bacsai
1c98796e64 new templates + tags + dev mode updated 2023-01-10 13:24:04 +01:00
Andras Bacsai
e686d9a6ea add lock file 2023-01-10 13:01:37 +01:00
Andras Bacsai
a1936b9d59 update jsonwebtoken 2023-01-10 13:01:03 +01:00
Andras Bacsai
834f9c9337 template updates 2023-01-10 13:01:03 +01:00
Andras Bacsai
615f8cfd3b feat: handle invite_only plausible analytics 2023-01-10 13:01:03 +01:00
Andras Bacsai
8ed134105f remove console.log 2023-01-10 13:01:03 +01:00
Andras Bacsai
5d6169b270 Merge pull request #781 from kaname-png/libretranslate
feat(ui): add libretranslate service icon
2023-01-10 12:57:05 +01:00
Andras Bacsai
e83de8b938 fix: local images for reverting 2023-01-10 12:24:22 +01:00
Andras Bacsai
ee55e039b2 Merge pull request #798 from hyddeos/main
fix the console error on Documentation hover
2023-01-10 11:52:57 +01:00
Andras Bacsai
086dd89144 fix: temporary disable dns check with dns servers 2023-01-10 11:50:41 +01:00
Andras Bacsai
68e5d4dd2c fix: doc link 2023-01-10 11:35:10 +01:00
Andras Bacsai
55a35c6bec fix: remove prefetches 2023-01-10 11:31:44 +01:00
Andras Bacsai
d09b4885fe Merge pull request #784 from hyddeos/tool-tip
Change color for Tooltip on hover
2023-01-10 11:29:54 +01:00
Andras Bacsai
a46773e6d8 Merge branch 'next' into tool-tip 2023-01-10 11:29:36 +01:00
Andras Bacsai
a422d0220c fix: add documentation link again 2023-01-10 11:27:43 +01:00
Andras Bacsai
e5eba8430a Merge pull request #783 from hyddeos/doc-in-mob-menu
Add link to the Documentation in the mobile menu
2023-01-10 11:26:42 +01:00
Andras Bacsai
3d235dc316 Merge pull request #794 from TetrisIQ/main
feat: adding icon for whoogle
2023-01-10 11:19:42 +01:00
Andras Bacsai
80d3b4be8c Merge pull request #825 from MrSquaare/feature/openblocks-service
feat: add Openblocks icon
2023-01-10 11:19:13 +01:00
Andras Bacsai
fe8b7480df Merge pull request #836 from coollabsio/feat/git-source-custom-user
fix: custom gitlab git user
2023-01-10 11:17:42 +01:00
Andras Bacsai
cebfc3aaa0 Merge pull request #804 from titouanmathis/feat/git-source-custom-user
feat(git-source): Add support for custom SSH user for GitLab self-hosted
2023-01-10 11:17:01 +01:00
Andras Bacsai
f778b5a12d fix: custom gitlab git user 2023-01-10 11:15:21 +01:00
Andras Bacsai
2244050160 Merge pull request #816 from Yarmeli/main
[Bug] Fixed issue with docker-compose not loading for Gitlab instances
2023-01-10 10:56:03 +01:00
Andras Bacsai
9284e42b62 fix: $ sign in secrets 2023-01-10 10:52:40 +01:00
Andras Bacsai
ee40120496 fix: read-only iam 2023-01-10 10:26:11 +01:00
Andras Bacsai
30cd2149ea fix: read-only permission 2023-01-10 10:15:03 +01:00
Andras Bacsai
395df36d57 chore: version++ 2023-01-10 09:57:27 +01:00
Andras Bacsai
79597ea0e5 fix: parsing secrets 2023-01-10 09:57:01 +01:00
Guillaume Bonnet
283f39270a feat: add Openblocks icon 2023-01-05 12:26:50 +00:00
Andras Bacsai
7d892bb19d esbuild 2022-12-29 22:33:31 +01:00
Yarmeli
a025f124f3 Updated index.svelte with the same changes from +page.svelte 2022-12-29 19:00:11 +00:00
Yarmeli
84f7287bf8 Fixed issue unable to find the docker compose file 2022-12-29 18:54:54 +00:00
Titouan Mathis
2391850218 Add support for custom SSH user for GitLab self-hosted 2022-12-21 15:10:51 +01:00
Eric
af548e6ef8 Change of link "rel" to "external"
To prevent hover console-error like on the documentations-icon in the desktop mode
2022-12-19 20:33:22 +01:00
hyddeos
ed24a9c990 fix the consol error on documentation hover 2022-12-19 20:02:24 +01:00
Alex
0d51b04d79 feat: adding icon for whoogle 2022-12-18 14:13:49 +01:00
hyddeos
379b1de64f change color for Tooltip on hover 2022-12-12 18:24:13 +01:00
hyddeos
f3ff324925 fixed size on icon 2022-12-12 17:50:35 +01:00
hyddeos
0f2160222f Add Documents link to Mobile-Menu 2022-12-12 17:45:04 +01:00
hyddeos
ce3750c51c Add link document to Mobile-menu 2022-12-12 17:38:45 +01:00
Kaname
72a7ea6e91 feat(ui): add libretranslate service icon 2022-12-12 15:48:23 +00:00
491 changed files with 32357 additions and 2803 deletions

View File

@@ -5,7 +5,9 @@ on:
paths: paths:
- "others/pocketbase/*" - "others/pocketbase/*"
- ".github/workflows/pocketbase-release.yml" - ".github/workflows/pocketbase-release.yml"
branches:
- next
- main
jobs: jobs:
arm64: arm64:
runs-on: [self-hosted, arm64] runs-on: [self-hosted, arm64]
@@ -27,7 +29,7 @@ jobs:
context: others/pocketbase/ context: others/pocketbase/
platforms: linux/arm64 platforms: linux/arm64
push: true push: true
tags: coollabsio/pocketbase:0.10.2-arm64 tags: coollabsio/pocketbase:0.11.0-arm64
amd64: amd64:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
@@ -48,7 +50,7 @@ jobs:
context: others/pocketbase/ context: others/pocketbase/
platforms: linux/amd64 platforms: linux/amd64
push: true push: true
tags: coollabsio/pocketbase:0.10.2-amd64 tags: coollabsio/pocketbase:0.11.0-amd64
aarch64: aarch64:
runs-on: [self-hosted, arm64] runs-on: [self-hosted, arm64]
steps: steps:
@@ -69,7 +71,7 @@ jobs:
context: others/pocketbase/ context: others/pocketbase/
platforms: linux/aarch64 platforms: linux/aarch64
push: true push: true
tags: coollabsio/pocketbase:0.10.2-aarch64 tags: coollabsio/pocketbase:0.11.0-aarch64
merge-manifest: merge-manifest:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64] needs: [amd64, arm64, aarch64]
@@ -87,5 +89,5 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest - name: Create & publish manifest
run: | run: |
docker manifest create coollabsio/pocketbase:0.10.2 --amend coollabsio/pocketbase:0.10.2-amd64 --amend coollabsio/pocketbase:0.10.2-arm64 --amend coollabsio/pocketbase:0.10.2-aarch64 docker manifest create coollabsio/pocketbase:0.11.0 --amend coollabsio/pocketbase:0.11.0-amd64 --amend coollabsio/pocketbase:0.11.0-arm64 --amend coollabsio/pocketbase:0.11.0-aarch64
docker manifest push coollabsio/pocketbase:0.10.2 docker manifest push coollabsio/pocketbase:0.11.0

View File

@@ -54,7 +54,7 @@ jobs:
          context: .
          platforms: linux/amd64
          push: true
-         tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64
+         tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}
          cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
          cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
      aarch64:
@@ -103,10 +103,10 @@
        id: package-version
      - name: Create & publish manifest
        run: |
-         docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
-         docker manifest create coollabsio/coolify:latest --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
-         docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
-         docker manifest push coollabsio/coolify:latest
+         docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}}
+         docker tag coollabsio/coolify:${{steps.package-version.outputs.current-version}} coollabsio/coolify:latest
+         docker push coollabsio/coolify:latest
+         docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:latest
      - uses: sarisia/actions-status-discord@v1
        if: always()
        with:

View File

@@ -65,7 +65,7 @@ jobs:
          context: .
          platforms: linux/amd64
          push: true
-         tags: coollabsio/coolify:next-amd64
+         tags: coollabsio/coolify:next
          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
      merge-manifest:
@@ -85,8 +85,7 @@
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Create & publish manifest
        run: |
-         docker manifest create coollabsio/coolify:next --amend coollabsio/coolify:next-amd64 --amend coollabsio/coolify:next-arm64
-         docker manifest push coollabsio/coolify:next
+         docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
      - uses: sarisia/actions-status-discord@v1
        if: always()
        with:

.gitignore vendored
View File

@@ -12,6 +12,7 @@ dist
apps/api/db/*.db apps/api/db/*.db
apps/api/db/migration.db-journal apps/api/db/migration.db-journal
apps/api/core* apps/api/core*
apps/server/build
apps/backup/backups/* apps/backup/backups/*
!apps/backup/backups/.gitkeep !apps/backup/backups/.gitkeep
/logs /logs

apps/api/.gitignore vendored
View File

@@ -8,4 +8,6 @@ package
!.env.example !.env.example
dist dist
dev.db dev.db
client client
testTemplate.yaml
testTags.json

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,229 @@
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: "0.10.2" defaultVersion: "9.22"
documentation: https://docs.directus.io/getting-started/introduction.html
type: directus-postgresql
name: Directus
subname: (PostgreSQL)
description: >-
Directus is a free and open-source headless CMS framework for managing custom SQL-based databases.
labels:
- CMS
- headless
services:
$$id:
name: Directus
depends_on:
- $$id-postgresql
- $$id-redis
image: directus/directus:$$core_version
volumes:
- $$id-uploads:/directus/uploads
- $$id-database:/directus/database
- $$id-extensions:/directus/extensions
environment:
- KEY=$$secret_key
- SECRET=$$secret_secret
- DB_CLIENT=pg
- DB_CONNECTION_STRING=$$secret_db_connection_string
- CACHE_ENABLED=true
- CACHE_STORE=redis
- CACHE_REDIS=$$secret_cache_redis
- ADMIN_EMAIL=$$config_admin_email
- ADMIN_PASSWORD=$$secret_admin_password
- CACHE_AUTO_PURGE=true
- PUBLIC_URL=$$config_public_url
ports:
- "8055"
$$id-postgresql:
name: Directus PostgreSQL
depends_on: []
image: postgres:14-alpine
volumes:
- $$id-postgresql-data:/var/lib/postgresql/data
environment:
- POSTGRES_USER=$$config_postgres_user
- POSTGRES_PASSWORD=$$secret_postgres_password
- POSTGRES_DB=$$config_postgres_db
ports: []
$$id-redis:
name: Directus Redis
depends_on: []
image: redis:7.0.4-alpine
command: "--maxmemory 512mb --maxmemory-policy allkeys-lru --maxmemory-samples 5"
volumes:
- "$$id-redis:/data"
environment: []
variables:
- id: $$config_public_url
name: PUBLIC_URL
label: Public URL
defaultValue: $$generate_fqdn
description: ""
- id: $$secret_db_connection_string
name: DB_CONNECTION_STRING
label: Directus Database Url
defaultValue: postgresql://$$config_postgres_user:$$secret_postgres_password@$$id-postgresql:5432/$$config_postgres_db
description: ""
- id: $$config_postgres_db
main: $$id-postgresql
name: POSTGRES_DB
label: Database
defaultValue: directus
description: ""
- id: $$config_postgres_user
main: $$id-postgresql
name: POSTGRES_USER
label: User
defaultValue: $$generate_username
description: ""
- id: $$secret_postgres_password
main: $$id-postgresql
name: POSTGRES_PASSWORD
label: Password
defaultValue: $$generate_password
description: ""
showOnConfiguration: true
- id: $$secret_cache_redis
name: CACHE_REDIS
label: Redis Url
defaultValue: redis://$$id-redis:6379
description: ""
- id: $$config_admin_email
name: ADMIN_EMAIL
label: Initial Admin Email
defaultValue: "admin@example.com"
description: "The email address of the first user that is automatically created. You can change it later in Directus."
- id: $$secret_admin_password
name: ADMIN_PASSWORD
label: Initial Admin Password
defaultValue: $$generate_password
description: "The password of the first user that is automatically created."
showOnConfiguration: true
- id: $$secret_key
name: KEY
label: Key
defaultValue: $$generate_password
description: "Unique identifier for the project."
showOnConfiguration: true
- id: $$secret_secret
name: SECRET
label: Secret
defaultValue: $$generate_password
description: "Secret string for the project."
showOnConfiguration: true
- templateVersion: 1.0.0
defaultVersion: v1.3.8
documentation: https://github.com/LibreTranslate/LibreTranslate
description: Free and Open Source Machine Translation API. 100% self-hosted, offline capable and easy to setup.
type: libretranslate
name: Libretranslate
labels:
- translator
- argos
- python
- libretranslate
services:
$$id:
name: Libretranslate
image: libretranslate/libretranslate:$$core_version
environment:
- LT_HOST=0.0.0.0
- LT_SUGGESTIONS=true
- LT_CHAR_LIMIT=$$config_lt_char_limit
- LT_REQ_LIMIT=$$config_lt_req_limit
- LT_BATCH_LIMIT=$$config_lt_batch_limit
- LT_GA_ID=$$config_lt_ga_id
- LT_DISABLE_WEB_UI=$$config_lt_web_ui
volumes:
- $$id-libretranslate:/libretranslate
ports:
- "5000"
variables:
- id: $$config_lt_char_limit
name: LT_CHAR_LIMIT
label: Char limit
defaultValue: "5000"
description: "Set character limit."
- id: $$config_lt_req_limit
name: LT_REQ_LIMIT
label: Request limit
defaultValue: "5000"
description: "Set maximum number of requests per minute per client."
- id: $$config_lt_batch_limit
name: LT_BATCH_LIMIT
label: Batch Limit
defaultValue: "5000"
description: "Set maximum number of texts to translate in a batch request."
- id: $$config_lt_ga_id
name: LT_GA_ID
label: Google Analytics ID
defaultValue: ""
description: "Enable Google Analytics on the API client page by providing an ID"
- id: $$config_lt_web_ui
name: LT_DISABLE_WEB_UI
label: Web UI
defaultValue: "false"
description: "Disable or enable web ui. True or false."
- templateVersion: 1.0.0
defaultVersion: 0.8.0
documentation: https://github.com/benbusby/whoogle-search
type: whoogle
name: Whoogle Search
description: A self-hosted, ad-free, privacy-respecting metasearch engine
labels:
- search
- google
services:
$$id:
name: Whoogle Search
documentation: https://github.com/benbusby/whoogle-search
depends_on: []
image: benbusby/whoogle-search:$$core_version
cap_drop:
- ALL
environment:
- WHOOGLE_USER=$$config_whoogle_username
- WHOOGLE_PASS=$$secret_whoogle_password
- WHOOGLE_CONFIG_PREFERENCES_KEY=$$secret_whoogle_preferences_key
ulimits:
nofile:
soft: 262144
hard: 262144
ports:
- "5000"
variables:
- id: $$config_whoogle_username
name: WHOOGLE_USER
label: Whoogle User
defaultValue: $$generate_username
description: "Username to log into Whoogle"
- id: $$secret_whoogle_password
name: WHOOGLE_PASSWORD
label: Whoogle Password
defaultValue: $$generate_password
description: "Password to log into Whoogle"
showOnConfiguration: true
- id: $$secret_whoogle_preferences_key
name: WHOOGLE_CONFIG_PREFERENCES_KEY
label: Whoogle preferences key
defaultValue: $$generate_password
description: "password to encrypt preferences"
- templateVersion: 1.0.0
defaultVersion: 1.1.3
documentation: https://docs.openblocks.dev/
type: openblocks
name: Openblocks
description: The Open Source Retool Alternative
services:
$$id:
image: openblocksdev/openblocks-ce:$$core_version
volumes:
- $$id-stacks-data:/openblocks-stacks
ports:
- "3000"
- templateVersion: 1.0.0
defaultVersion: "0.11.0"
documentation: https://pocketbase.io/docs/ documentation: https://pocketbase.io/docs/
type: pocketbase type: pocketbase
name: Pocketbase name: Pocketbase
@@ -124,12 +348,12 @@
description: "" description: ""
- id: $$config_disable_auth - id: $$config_disable_auth
name: DISABLE_AUTH name: DISABLE_AUTH
label: Disable Authentication label: Authentication
defaultValue: "false" defaultValue: "false"
description: "" description: ""
- id: $$config_disable_registration - id: $$config_disable_registration
name: DISABLE_REGISTRATION name: DISABLE_REGISTRATION
label: Disable Registration label: Registration
defaultValue: "true" defaultValue: "true"
description: "" description: ""
- id: $$config_postgres_user - id: $$config_postgres_user
@@ -157,7 +381,7 @@
defaultValue: plausible.js defaultValue: plausible.js
description: This is the default script name. description: This is the default script name.
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: "1.17" defaultVersion: "1.18"
documentation: https://docs.gitea.io documentation: https://docs.gitea.io
type: gitea type: gitea
name: Gitea name: Gitea
@@ -332,12 +556,12 @@
volumes: volumes:
- $$id-lavalink:/lavalink - $$id-lavalink:/lavalink
ports: ports:
- "2333" - $$config_port
files: files:
- location: /opt/Lavalink/application.yml - location: /opt/Lavalink/application.yml
content: >- content: >-
server: server:
port: $$config_port port: 2333
address: 0.0.0.0 address: 0.0.0.0
lavalink: lavalink:
server: server:
@@ -364,18 +588,13 @@
max-file-size: 1GB max-file-size: 1GB
max-history: 30 max-history: 30
variables: variables:
- id: $$config_port
name: PORT
label: Port
defaultValue: "2333"
required: true
- id: $$secret_password - id: $$secret_password
name: PASSWORD name: PASSWORD
label: Password label: Password
defaultValue: $$generate_password defaultValue: $$generate_password
required: true required: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: v1.8.9 defaultVersion: v1.9.3
documentation: https://docs.appsmith.com/getting-started/setup/instance-configuration/ documentation: https://docs.appsmith.com/getting-started/setup/instance-configuration/
type: appsmith type: appsmith
name: Appsmith name: Appsmith
@@ -408,7 +627,7 @@
defaultValue: "true" defaultValue: "true"
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 0.57.4 defaultVersion: 0.58.4
documentation: https://hub.docker.com/r/zadam/trilium documentation: https://hub.docker.com/r/zadam/trilium
description: "A hierarchical note taking application with focus on building large personal knowledge bases." description: "A hierarchical note taking application with focus on building large personal knowledge bases."
labels: labels:
@@ -428,7 +647,7 @@
- "8080" - "8080"
variables: [] variables: []
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 1.18.5 defaultVersion: 1.19.4
documentation: https://hub.docker.com/r/louislam/uptime-kuma documentation: https://hub.docker.com/r/louislam/uptime-kuma
description: A free & fancy self-hosted monitoring tool. description: A free & fancy self-hosted monitoring tool.
labels: labels:
@@ -445,7 +664,7 @@
- "3001" - "3001"
variables: [] variables: []
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: "5.8" defaultVersion: "6.0"
documentation: https://hub.docker.com/r/silviof/docker-languagetool documentation: https://hub.docker.com/r/silviof/docker-languagetool
description: "A multilingual grammar, style and spell checker." description: "A multilingual grammar, style and spell checker."
type: languagetool type: languagetool
@@ -460,7 +679,7 @@
- "8010" - "8010"
variables: [] variables: []
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 1.26.0 defaultVersion: 1.27.0
documentation: https://hub.docker.com/r/vaultwarden/server documentation: https://hub.docker.com/r/vaultwarden/server
description: "Bitwarden compatible server written in Rust." description: "Bitwarden compatible server written in Rust."
type: vaultwarden type: vaultwarden
@@ -478,7 +697,7 @@
- "80" - "80"
variables: [] variables: []
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 9.3.1 defaultVersion: 9.3.2
documentation: https://hub.docker.com/r/grafana/grafana documentation: https://hub.docker.com/r/grafana/grafana
type: grafana type: grafana
name: Grafana name: Grafana
@@ -499,7 +718,7 @@
- "3000" - "3000"
variables: [] variables: []
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 1.1.2 defaultVersion: 1.2.0
documentation: https://appwrite.io/docs documentation: https://appwrite.io/docs
type: appwrite type: appwrite
name: Appwrite name: Appwrite
@@ -1669,7 +1888,7 @@
defaultValue: weblate defaultValue: weblate
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 2022.12.12-966e9c3c defaultVersion: 2023.01.15-52d41559
documentation: https://docs.searxng.org/ documentation: https://docs.searxng.org/
type: searxng type: searxng
name: SearXNG name: SearXNG
@@ -1742,7 +1961,7 @@
defaultValue: $$generate_password defaultValue: $$generate_password
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: v3.0.0 defaultVersion: v3.0.2
documentation: https://glitchtip.com/documentation documentation: https://glitchtip.com/documentation
type: glitchtip type: glitchtip
name: GlitchTip name: GlitchTip
@@ -1964,7 +2183,7 @@
defaultValue: glitchtip defaultValue: glitchtip
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: v2.16.0 defaultVersion: v2.16.1
documentation: https://hasura.io/docs/latest/index/ documentation: https://hasura.io/docs/latest/index/
type: hasura type: hasura
name: Hasura name: Hasura
@@ -2444,7 +2663,7 @@
description: "" description: ""
showOnConfiguration: true showOnConfiguration: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: v0.30.1 defaultVersion: v0.30.5
documentation: https://docs.meilisearch.com/learn/getting_started/quick_start.html documentation: https://docs.meilisearch.com/learn/getting_started/quick_start.html
type: meilisearch type: meilisearch
name: MeiliSearch name: MeiliSearch
@@ -2474,7 +2693,7 @@
showOnConfiguration: true showOnConfiguration: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
ignore: true ignore: true
defaultVersion: latest defaultVersion: 5.30.0
documentation: https://docs.ghost.org documentation: https://docs.ghost.org
arch: amd64 arch: amd64
type: ghost-mariadb type: ghost-mariadb
@@ -2592,7 +2811,7 @@
defaultValue: $$generate_password defaultValue: $$generate_password
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: "5.25.3" defaultVersion: 5.30.0
documentation: https://docs.ghost.org documentation: https://docs.ghost.org
type: ghost-only type: ghost-only
name: Ghost name: Ghost
@@ -2656,7 +2875,7 @@
placeholder: "ghost_db" placeholder: "ghost_db"
required: true required: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: "5.25.3" defaultVersion: 5.30.0
documentation: https://docs.ghost.org documentation: https://docs.ghost.org
type: ghost-mysql type: ghost-mysql
name: Ghost name: Ghost
@@ -2733,7 +2952,7 @@
defaultValue: $$generate_password defaultValue: $$generate_password
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: php8.1 defaultVersion: php8.2
documentation: https://wordpress.org/ documentation: https://wordpress.org/
type: wordpress type: wordpress
name: WordPress name: WordPress
@@ -2823,7 +3042,7 @@
description: "" description: ""
readOnly: true readOnly: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: php8.1 defaultVersion: php8.2
documentation: https://wordpress.org/ documentation: https://wordpress.org/
type: wordpress-only type: wordpress-only
name: WordPress name: WordPress
@@ -2897,7 +3116,7 @@
define('WP_DEBUG_DISPLAY', false); define('WP_DEBUG_DISPLAY', false);
@ini_set('display_errors', 0); @ini_set('display_errors', 0);
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 4.9.0 defaultVersion: 4.9.1
documentation: https://coder.com/docs/coder-oss/latest documentation: https://coder.com/docs/coder-oss/latest
type: vscodeserver type: vscodeserver
name: VSCode Server name: VSCode Server
@@ -2912,7 +3131,6 @@
depends_on: [] depends_on: []
image: "codercom/code-server:$$core_version" image: "codercom/code-server:$$core_version"
volumes: volumes:
- "$$id-config-data:/home/coder/.local/share/code-server"
- "$$id-vscodeserver-data:/home/coder" - "$$id-vscodeserver-data:/home/coder"
- "$$id-keys-directory:/root/.ssh" - "$$id-keys-directory:/root/.ssh"
- "$$id-theme-and-plugin-directory:/root/.local/share/code-server" - "$$id-theme-and-plugin-directory:/root/.local/share/code-server"
@@ -2928,7 +3146,7 @@
description: "" description: ""
showOnConfiguration: true showOnConfiguration: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: RELEASE.2022-12-12T19-27-27Z defaultVersion: RELEASE.2023-01-12T02-06-16Z
documentation: https://min.io/docs/minio documentation: https://min.io/docs/minio
type: minio type: minio
name: MinIO name: MinIO
@@ -2987,7 +3205,7 @@
description: "" description: ""
showOnConfiguration: true showOnConfiguration: true
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 0.21.1 defaultVersion: stable
documentation: https://fider.io/docs documentation: https://fider.io/docs
type: fider type: fider
name: Fider name: Fider
@@ -3106,7 +3324,7 @@
defaultValue: $$generate_username defaultValue: $$generate_username
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 0.207.0 defaultVersion: 0.210.1
documentation: https://docs.n8n.io documentation: https://docs.n8n.io
type: n8n type: n8n
name: n8n.io name: n8n.io
@@ -3137,7 +3355,7 @@
defaultValue: $$generate_fqdn defaultValue: $$generate_fqdn
description: "" description: ""
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: stable defaultVersion: v1.5.1
documentation: https://plausible.io/doc/ documentation: https://plausible.io/doc/
arch: amd64 arch: amd64
type: plausibleanalytics type: plausibleanalytics
@@ -3250,12 +3468,12 @@
description: "" description: ""
- id: $$config_disable_auth - id: $$config_disable_auth
name: DISABLE_AUTH name: DISABLE_AUTH
label: Disable Authentication label: Authentication
defaultValue: "false" defaultValue: "false"
description: "" description: ""
- id: $$config_disable_registration - id: $$config_disable_registration
name: DISABLE_REGISTRATION name: DISABLE_REGISTRATION
label: Disable Registration label: Registration
defaultValue: "true" defaultValue: "true"
description: "" description: ""
- id: $$config_postgresql_username - id: $$config_postgresql_username
@@ -3283,7 +3501,7 @@
defaultValue: plausible.js defaultValue: plausible.js
description: This is the default script name. description: This is the default script name.
- templateVersion: 1.0.0 - templateVersion: 1.0.0
defaultVersion: 0.99.1 defaultVersion: 0.101.2
documentation: https://docs.nocodb.com documentation: https://docs.nocodb.com
type: nocodb type: nocodb
name: NocoDB name: NocoDB

View File

@@ -16,31 +16,31 @@
}, },
"dependencies": { "dependencies": {
"@breejs/ts-worker": "2.0.0", "@breejs/ts-worker": "2.0.0",
"@fastify/autoload": "5.5.0", "@fastify/autoload": "5.7.0",
"@fastify/cookie": "8.3.0", "@fastify/cookie": "8.3.0",
"@fastify/cors": "8.2.0", "@fastify/cors": "8.2.0",
"@fastify/env": "4.1.0", "@fastify/env": "4.2.0",
"@fastify/jwt": "6.3.3", "@fastify/jwt": "6.5.0",
"@fastify/multipart": "7.3.0", "@fastify/multipart": "7.3.0",
"@fastify/static": "6.5.1", "@fastify/static": "6.6.0",
"@iarna/toml": "2.2.5", "@iarna/toml": "2.2.5",
"@ladjs/graceful": "3.0.2", "@ladjs/graceful": "3.2.1",
"@prisma/client": "4.6.1", "@prisma/client": "4.8.1",
"@sentry/node": "7.21.1", "@sentry/node": "7.30.0",
"@sentry/tracing": "7.21.1", "@sentry/tracing": "7.30.0",
"axe": "11.0.0", "axe": "11.2.1",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
"bree": "9.1.2", "bree": "9.1.3",
"cabin": "11.0.1", "cabin": "11.1.1",
"compare-versions": "5.0.1", "compare-versions": "5.0.1",
"csv-parse": "5.3.2", "csv-parse": "5.3.3",
"csvtojson": "2.0.10", "csvtojson": "2.0.10",
"cuid": "2.1.8", "cuid": "2.1.8",
"dayjs": "1.11.6", "dayjs": "1.11.7",
"dockerode": "3.3.4", "dockerode": "3.3.4",
"dotenv-extended": "2.9.0", "dotenv-extended": "2.9.0",
"execa": "6.1.0", "execa": "6.1.0",
"fastify": "4.10.2", "fastify": "4.11.0",
"fastify-plugin": "4.3.0", "fastify-plugin": "4.3.0",
"fastify-socket.io": "4.0.0", "fastify-socket.io": "4.0.0",
"generate-password": "1.7.0", "generate-password": "1.7.0",
@@ -48,36 +48,36 @@
"is-ip": "5.0.0", "is-ip": "5.0.0",
"is-port-reachable": "4.0.0", "is-port-reachable": "4.0.0",
"js-yaml": "4.1.0", "js-yaml": "4.1.0",
"jsonwebtoken": "8.5.1", "jsonwebtoken": "9.0.0",
"minimist": "^1.2.7", "minimist": "^1.2.7",
"node-forge": "1.3.1", "node-forge": "1.3.1",
"node-os-utils": "1.3.7", "node-os-utils": "1.3.7",
"p-all": "4.0.0", "p-all": "4.0.0",
"p-throttle": "5.0.0", "p-throttle": "5.0.0",
"prisma": "4.6.1", "prisma": "4.8.1",
"public-ip": "6.0.1", "public-ip": "6.0.1",
"pump": "3.0.0", "pump": "3.0.0",
"shell-quote": "^1.7.4", "shell-quote": "^1.7.4",
"socket.io": "4.5.3", "socket.io": "4.5.4",
"ssh-config": "4.1.6", "ssh-config": "4.2.0",
"strip-ansi": "7.0.1", "strip-ansi": "7.0.1",
"unique-names-generator": "4.7.1" "unique-names-generator": "4.7.1"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "18.11.9", "@types/node": "18.11.18",
"@types/node-os-utils": "1.3.0", "@types/node-os-utils": "1.3.0",
"@typescript-eslint/eslint-plugin": "5.44.0", "@typescript-eslint/eslint-plugin": "5.48.1",
"@typescript-eslint/parser": "5.44.0", "@typescript-eslint/parser": "5.48.1",
"esbuild": "0.15.15", "esbuild": "0.16.16",
"eslint": "8.28.0", "eslint": "8.31.0",
"eslint-config-prettier": "8.5.0", "eslint-config-prettier": "8.6.0",
"eslint-plugin-prettier": "4.2.1", "eslint-plugin-prettier": "4.2.1",
"nodemon": "2.0.20", "nodemon": "2.0.20",
"prettier": "2.7.1", "prettier": "2.8.2",
"rimraf": "3.0.2", "rimraf": "3.0.2",
"tsconfig-paths": "4.1.0", "tsconfig-paths": "4.1.2",
"types-fastify-socket.io": "0.0.1", "types-fastify-socket.io": "0.0.1",
"typescript": "4.9.3" "typescript": "4.9.4"
}, },
"prisma": { "prisma": {
"seed": "node prisma/seed.js" "seed": "node prisma/seed.js"

View File

@@ -0,0 +1,27 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"forPublic" BOOLEAN NOT NULL DEFAULT false,
"type" TEXT,
"apiUrl" TEXT,
"htmlUrl" TEXT,
"customPort" INTEGER NOT NULL DEFAULT 22,
"customUser" TEXT NOT NULL DEFAULT 'git',
"organization" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "isSystemWide", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "isSystemWide", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"debug" BOOLEAN NOT NULL DEFAULT false,
"previews" BOOLEAN NOT NULL DEFAULT false,
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
"isBot" BOOLEAN NOT NULL DEFAULT false,
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
"isHttp2" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -186,6 +186,7 @@ model ApplicationSettings {
isPublicRepository Boolean @default(false) isPublicRepository Boolean @default(false)
isDBBranching Boolean @default(false) isDBBranching Boolean @default(false)
isCustomSSL Boolean @default(false) isCustomSSL Boolean @default(false)
isHttp2 Boolean @default(false)
createdAt DateTime @default(now()) createdAt DateTime @default(now())
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt
application Application @relation(fields: [applicationId], references: [id]) application Application @relation(fields: [applicationId], references: [id])
@@ -325,6 +326,7 @@ model GitSource {
apiUrl String? apiUrl String?
htmlUrl String? htmlUrl String?
customPort Int @default(22) customPort Int @default(22)
customUser String @default("git")
organization String? organization String?
createdAt DateTime @default(now()) createdAt DateTime @default(now())
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt

View File

@@ -156,7 +156,7 @@ const host = '0.0.0.0';
graceful.listen(); graceful.listen();
setInterval(async () => { setInterval(async () => {
if (!scheduler.workers.has('deployApplication')) { if (!scheduler.workers.has('deployApplication')) {
scheduler.run('deployApplication'); scheduler.run('deployApplication');
} }
}, 2000); }, 2000);
@@ -171,6 +171,11 @@ const host = '0.0.0.0';
await cleanupStorage(); await cleanupStorage();
}, 60000 * 15); }, 60000 * 15);
// Cleanup stucked containers (not defined in Coolify, but still running and managed by Coolify)
setInterval(async () => {
await cleanupStuckedContainers();
}, 60000);
// checkProxies, checkFluentBit & refresh templates // checkProxies, checkFluentBit & refresh templates
setInterval(async () => { setInterval(async () => {
await checkProxies(); await checkProxies();
@@ -197,7 +202,13 @@ const host = '0.0.0.0';
await copySSLCertificates(); await copySSLCertificates();
}, 10000); }, 10000);
await Promise.all([getTagsTemplates(), getArch(), getIPAddress(), configureRemoteDockers()]); await Promise.all([
getTagsTemplates(),
getArch(),
getIPAddress(),
configureRemoteDockers()
// cleanupStuckedContainers()
]);
} catch (error) { } catch (error) {
console.error(error); console.error(error);
process.exit(1); process.exit(1);
@@ -225,8 +236,22 @@ async function getTagsTemplates() {
const { default: got } = await import('got'); const { default: got } = await import('got');
try { try {
if (isDev) { if (isDev) {
const templates = await fs.readFile('./devTemplates.yaml', 'utf8'); let templates = await fs.readFile('./devTemplates.yaml', 'utf8');
const tags = await fs.readFile('./devTags.json', 'utf8'); let tags = await fs.readFile('./devTags.json', 'utf8');
try {
if (await fs.stat('./testTemplate.yaml')) {
templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8'));
}
} catch (error) {}
try {
if (await fs.stat('./testTags.json')) {
const testTags = await fs.readFile('./testTags.json', 'utf8');
if (testTags.length > 0) {
tags = JSON.stringify(JSON.parse(tags).concat(JSON.parse(testTags)));
}
}
} catch (error) {}
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates))); await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)));
await fs.writeFile('./tags.json', tags); await fs.writeFile('./tags.json', tags);
console.log('[004] Tags and templates loaded in dev mode...'); console.log('[004] Tags and templates loaded in dev mode...');
@@ -297,6 +322,49 @@ async function getArch() {
} catch (error) {} } catch (error) {}
} }
async function cleanupStuckedContainers() {
try {
const destinationDockers = await prisma.destinationDocker.findMany();
let enginesDone = new Set();
for (const destination of destinationDockers) {
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress))
return;
if (destination.engine) {
enginesDone.add(destination.engine);
}
if (destination.remoteIpAddress) {
if (!destination.remoteVerified) continue;
enginesDone.add(destination.remoteIpAddress);
}
const { stdout: containers } = await executeCommand({
dockerId: destination.id,
command: `docker container ps -a --filter "label=coolify.managed=true" --format '{{ .Names}}'`
});
if (containers) {
const containersArray = containers.trim().split('\n');
if (containersArray.length > 0) {
for (const container of containersArray) {
const containerId = container.split('-')[0];
const application = await prisma.application.findFirst({
where: { id: { startsWith: containerId } }
});
const service = await prisma.service.findFirst({
where: { id: { startsWith: containerId } }
});
const database = await prisma.database.findFirst({
where: { id: { startsWith: containerId } }
});
if (!application && !service && !database) {
await executeCommand({ command: `docker container rm -f ${container}` });
}
}
}
}
}
} catch (error) {
console.log(error);
}
}
async function configureRemoteDockers() { async function configureRemoteDockers() {
try { try {
const remoteDocker = await prisma.destinationDocker.findMany({ const remoteDocker = await prisma.destinationDocker.findMany({
@@ -529,9 +597,13 @@ async function cleanupStorage() {
  let enginesDone = new Set();
  for (const destination of destinationDockers) {
    if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return;
-   if (destination.engine) enginesDone.add(destination.engine);
-   if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress);
-   let force = false;
+   if (destination.engine) {
+     enginesDone.add(destination.engine);
+   }
+   if (destination.remoteIpAddress) {
+     if (!destination.remoteVerified) continue;
+     enginesDone.add(destination.remoteIpAddress);
+   }
    let lowDiskSpace = false;
    try {
      let stdout = null;
@@ -577,6 +649,8 @@
        }
      }
    } catch (error) {}
-   await cleanupDockerStorage(destination.id, lowDiskSpace, force);
+   if (lowDiskSpace) {
+     await cleanupDockerStorage(destination.id);
+   }
  }
}
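
The new cleanupStuckedContainers above relies on Coolify's container-naming convention: managed containers carry the owning resource id as the prefix before the first dash, and that prefix is looked up against known applications, services and databases. A minimal standalone sketch of the matching rule (isOrphanContainer and knownIds are illustrative stand-ins for the three Prisma lookups, not Coolify's actual API):

// A coolify.managed container is an orphan when its id prefix no longer
// matches any known application, service or database id.
function isOrphanContainer(containerName: string, knownIds: string[]): boolean {
  const idPrefix = containerName.split('-')[0];
  return !knownIds.some((id) => id.startsWith(idPrefix));
}

// Hypothetical ids: the first container is kept, the second would be removed.
console.log(isOrphanContainer('clx123abc-postgresql', ['clx123abcdef'])); // false
console.log(isOrphanContainer('deadbeef-redis', ['clx123abcdef'])); // true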

View File

@@ -196,7 +196,7 @@ import * as buildpacks from '../lib/buildPacks';
await executeCommand({ await executeCommand({
debug: true, debug: true,
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} up -d` command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
}); });
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId }); await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) { } catch (error) {
@@ -419,6 +419,7 @@ import * as buildpacks from '../lib/buildPacks';
githubAppId: gitSource.githubApp?.id, githubAppId: gitSource.githubApp?.id,
gitlabAppId: gitSource.gitlabApp?.id, gitlabAppId: gitSource.gitlabApp?.id,
customPort: gitSource.customPort, customPort: gitSource.customPort,
customUser: gitSource.customUser,
gitCommitHash, gitCommitHash,
configuration, configuration,
repository, repository,
@@ -600,6 +601,7 @@ import * as buildpacks from '../lib/buildPacks';
} }
if (buildPack === 'compose') { if (buildPack === 'compose') {
const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
try { try {
const { stdout: containers } = await executeCommand({ const { stdout: containers } = await executeCommand({
dockerId: destinationDockerId, dockerId: destinationDockerId,
@@ -629,7 +631,7 @@ import * as buildpacks from '../lib/buildPacks';
buildId, buildId,
applicationId, applicationId,
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} up -d` command: `docker compose --project-directory ${workdir} -f ${fileYaml} up -d`
}); });
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId }); await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
await prisma.build.update({ await prisma.build.update({
@@ -724,7 +726,7 @@ import * as buildpacks from '../lib/buildPacks';
await executeCommand({ await executeCommand({
debug, debug,
dockerId: destinationDocker.id, dockerId: destinationDocker.id,
command: `docker compose --project-directory ${workdir} up -d` command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
}); });
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId }); await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
} catch (error) { } catch (error) {

View File

@@ -26,8 +26,10 @@ export default async function (data) {
throw 'No Services found in docker-compose file.'; throw 'No Services found in docker-compose file.';
} }
let envs = []; let envs = [];
let buildEnvs = [];
if (secrets.length > 0) { if (secrets.length > 0) {
envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)]; envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
buildEnvs = [...buildEnvs, ...generateSecrets(secrets, pullmergeRequestId, true, null, true)];
} }
const composeVolumes = []; const composeVolumes = [];
@@ -43,8 +45,34 @@ export default async function (data) {
let networks = {}; let networks = {};
for (let [key, value] of Object.entries(dockerComposeYaml.services)) { for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
value['container_name'] = `${applicationId}-${key}`; value['container_name'] = `${applicationId}-${key}`;
let environment = typeof value['environment'] === 'undefined' ? [] : value['environment']
let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
if (Object.keys(environment).length > 0) {
environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`);
}
value['environment'] = [...environment, ...envs]; value['environment'] = [...environment, ...envs];
let build = typeof value['build'] === 'undefined' ? [] : value['build'];
if (typeof build === 'string') {
build = { context: build };
}
const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
let finalArgs = [...buildEnvs];
if (Object.keys(buildArgs).length > 0) {
for (const arg of buildArgs) {
const [key, _] = arg.split('=');
if (finalArgs.filter((env) => env.startsWith(key)).length === 0) {
finalArgs.push(arg);
}
}
}
if (build.length > 0 || buildArgs.length > 0 ) {
value['build'] = {
...build,
args: finalArgs
};
}
value['labels'] = labels; value['labels'] = labels;
// TODO: If we support separated volume for each service, we need to add it here // TODO: If we support separated volume for each service, we need to add it here
if (value['volumes']?.length > 0) { if (value['volumes']?.length > 0) {
@@ -95,7 +123,7 @@ export default async function (data) {
buildId, buildId,
applicationId, applicationId,
dockerId, dockerId,
command: `docker compose --project-directory ${workdir} pull` command: `docker compose --project-directory ${workdir} -f ${fileYaml} pull`
}); });
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId }); await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
await executeCommand({ await executeCommand({
@@ -103,7 +131,7 @@ export default async function (data) {
buildId, buildId,
applicationId, applicationId,
dockerId, dockerId,
command: `docker compose --project-directory ${workdir} build --progress plain` command: `docker compose --project-directory ${workdir} -f ${fileYaml} build --progress plain`
}); });
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId }); await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
} }
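
The build-args handling added above folds build-time secrets into each compose service's build.args; an arg from the compose file is kept only when no secret with the same key is already present, so secrets take precedence. A small sketch of that merge with hypothetical inputs (mergeBuildArgs is an illustrative name, not the actual helper):

// Secrets (already rendered as KEY=value strings) win over compose-file args
// with the same key, mirroring the finalArgs loop in the diff above.
function mergeBuildArgs(secretArgs: string[], composeArgs: string[]): string[] {
  const merged = [...secretArgs];
  for (const arg of composeArgs) {
    const key = arg.split('=')[0];
    if (!merged.some((env) => env.startsWith(key))) {
      merged.push(arg);
    }
  }
  return merged;
}

console.log(mergeBuildArgs(['API_KEY=abc123'], ['NODE_ENV=production', 'API_KEY=fromcompose']));
// -> [ 'API_KEY=abc123', 'NODE_ENV=production' ]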

View File

@@ -1,12 +1,18 @@
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageForLaravel, buildImage } from './common'; import { buildCacheImageForLaravel, buildImage } from './common';
const createDockerfile = async (data, image): Promise<void> => { const createDockerfile = async (data, image): Promise<void> => {
const { workdir, applicationId, tag, buildId, port } = data; const { workdir, applicationId, tag, buildId, port, secrets, pullmergeRequestId } = data;
const Dockerfile: Array<string> = []; const Dockerfile: Array<string> = [];
Dockerfile.push(`FROM ${image}`); Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`); Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
if (secrets.length > 0) {
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
Dockerfile.push(env);
});
}
Dockerfile.push('WORKDIR /app'); Dockerfile.push('WORKDIR /app');
Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`); Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
Dockerfile.push(`COPY --chown=application:application composer.* ./`); Dockerfile.push(`COPY --chown=application:application composer.* ./`);

View File

@@ -19,7 +19,7 @@ import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common
import { scheduler } from './scheduler'; import { scheduler } from './scheduler';
import type { ExecaChildProcess } from 'execa'; import type { ExecaChildProcess } from 'execa';
export const version = '3.12.8'; export const version = '3.12.18';
export const isDev = process.env.NODE_ENV === 'development'; export const isDev = process.env.NODE_ENV === 'development';
export const sentryDSN = export const sentryDSN =
'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216'; 'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216';
@@ -714,8 +714,10 @@ export async function startTraefikProxy(id: string): Promise<void> {
--network coolify-infra \ --network coolify-infra \
-p "80:80" \ -p "80:80" \
-p "443:443" \ -p "443:443" \
${isDev ? '-p "8080:8080"' : ''} \
--name coolify-proxy \ --name coolify-proxy \
-d ${defaultTraefikImage} \ -d ${defaultTraefikImage} \
${isDev ? '--api.insecure=true' : ''} \
--entrypoints.web.address=:80 \ --entrypoints.web.address=:80 \
--entrypoints.web.forwardedHeaders.insecure=true \ --entrypoints.web.forwardedHeaders.insecure=true \
--entrypoints.websecure.address=:443 \ --entrypoints.websecure.address=:443 \
@@ -1712,78 +1714,24 @@ export function convertTolOldVolumeNames(type) {
  }
}
-export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
-  // Cleanup old coolify images
+export async function cleanupDockerStorage(dockerId) {
+  // Cleanup images that are not used by any container
  try {
-   let { stdout: images } = await executeCommand({
-     dockerId,
-     command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`,
-     shell: true
-   });
-   images = images.trim();
-   if (images) {
-     await executeCommand({
-       dockerId,
-       command: `docker rmi -f ${images}" -q | xargs -r`,
-       shell: true
-     });
-   }
+   await executeCommand({ dockerId, command: `docker image prune -af` });
  } catch (error) {}
-  if (lowDiskSpace || force) {
-   // Cleanup images that are not used
-   try {
-     await executeCommand({ dockerId, command: `docker image prune -f` });
-   } catch (error) {}
-   const { numberOfDockerImagesKeptLocally } = await prisma.setting.findUnique({
-     where: { id: '0' }
-   });
-   const { stdout: images } = await executeCommand({
-     dockerId,
-     command: `docker images|grep -v "<none>"|grep -v REPOSITORY|awk '{print $1, $2}'`,
-     shell: true
-   });
-   const imagesArray = images.trim().replaceAll(' ', ':').split('\n');
-   const imagesSet = new Set(imagesArray.map((image) => image.split(':')[0]));
-   let deleteImage = [];
-   for (const image of imagesSet) {
-     let keepImage = [];
-     for (const image2 of imagesArray) {
-       if (image2.startsWith(image)) {
-         if (force) {
-           deleteImage.push(image2);
-           continue;
-         }
-         if (keepImage.length >= numberOfDockerImagesKeptLocally) {
-           deleteImage.push(image2);
-         } else {
-           keepImage.push(image2);
-         }
-       }
-     }
-   }
-   for (const image of deleteImage) {
-     try {
-       await executeCommand({ dockerId, command: `docker image rm -f ${image}` });
-     } catch (error) {
-       console.log(error);
-     }
-   }
-   // Prune coolify managed containers
-   try {
-     await executeCommand({
-       dockerId,
-       command: `docker container prune -f --filter "label=coolify.managed=true"`
-     });
-   } catch (error) {}
-   // Cleanup build caches
-   try {
-     await executeCommand({ dockerId, command: `docker builder prune -a -f` });
-   } catch (error) {}
-  }
+  // Prune coolify managed containers
+  try {
+   await executeCommand({
+     dockerId,
+     command: `docker container prune -f --filter "label=coolify.managed=true"`
+   });
+  } catch (error) {}
+  // Cleanup build caches
+  try {
+   await executeCommand({ dockerId, command: `docker builder prune -af` });
+  } catch (error) {}
}
export function persistentVolumes(id, persistentStorage, config) { export function persistentVolumes(id, persistentStorage, config) {
@@ -1884,11 +1832,36 @@ export async function pushToRegistry(
}); });
} }
function parseSecret(secret, isBuild) {
if (secret.value.includes('$')) {
secret.value = secret.value.replaceAll('$', '$$$$');
}
if (secret.value.includes('\\n')) {
if (isBuild) {
return `ARG ${secret.name}=${secret.value}`;
} else {
return `${secret.name}=${secret.value}`;
}
} else if (secret.value.includes(' ')) {
if (isBuild) {
return `ARG ${secret.name}='${secret.value}'`;
} else {
return `${secret.name}='${secret.value}'`;
}
} else {
if (isBuild) {
return `ARG ${secret.name}=${secret.value}`;
} else {
return `${secret.name}=${secret.value}`;
}
}
}
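
parseSecret centralises the quoting rules: literal $ characters are doubled to $$ so docker/compose does not expand them, values containing spaces are single-quoted when emitted as build ARGs, and everything else passes through unchanged. Rough usage sketch with made-up secret objects (only the { name, value } shape matters):

parseSecret({ name: 'API_KEY', value: 'abc$def' }, false);     // "API_KEY=abc$$def"
parseSecret({ name: 'GREETING', value: 'hello world' }, true);  // "ARG GREETING='hello world'"
parseSecret({ name: 'PORT_HINT', value: '3000' }, true);        // "ARG PORT_HINT=3000"

generateSecrets below now routes every secret through this helper, forcing the build flag off when compose is true so compose deployments always receive plain KEY=value lines.
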
export function generateSecrets( export function generateSecrets(
secrets: Array<any>, secrets: Array<any>,
pullmergeRequestId: string, pullmergeRequestId: string,
isBuild = false, isBuild = false,
port = null port = null,
compose = false
): Array<string> { ): Array<string> {
const envs = []; const envs = [];
const isPRMRSecret = secrets.filter((s) => s.isPRMRSecret); const isPRMRSecret = secrets.filter((s) => s.isPRMRSecret);
@@ -1899,15 +1872,7 @@ export function generateSecrets(
return; return;
} }
const build = isBuild && secret.isBuildSecret; const build = isBuild && secret.isBuildSecret;
if (build) { envs.push(parseSecret(secret, compose ? false : build));
if (secret.value.includes(' ') || secret.value.includes('\\n')) {
envs.push(`ARG ${secret.name}='${secret.value}'`);
} else {
envs.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
envs.push(`${secret.name}=${secret.value}`);
}
}); });
} }
if (!pullmergeRequestId && normalSecrets.length > 0) { if (!pullmergeRequestId && normalSecrets.length > 0) {
@@ -1916,15 +1881,7 @@ export function generateSecrets(
return; return;
} }
const build = isBuild && secret.isBuildSecret; const build = isBuild && secret.isBuildSecret;
if (build) { envs.push(parseSecret(secret, compose ? false : build));
if (secret.value.includes(' ') || secret.value.includes('\\n')) {
envs.push(`ARG ${secret.name}='${secret.value}'`);
} else {
envs.push(`ARG ${secret.name}=${secret.value}`);
}
} else {
envs.push(`${secret.name}=${secret.value}`);
}
}); });
} }
const portFound = envs.filter((env) => env.startsWith('PORT')); const portFound = envs.filter((env) => env.startsWith('PORT'));


@@ -12,7 +12,8 @@ export default async function ({
buildId, buildId,
privateSshKey, privateSshKey,
customPort, customPort,
forPublic forPublic,
customUser,
}: { }: {
applicationId: string; applicationId: string;
workdir: string; workdir: string;
@@ -25,6 +26,7 @@ export default async function ({
privateSshKey: string; privateSshKey: string;
customPort: number; customPort: number;
forPublic: boolean; forPublic: boolean;
customUser: string;
}): Promise<string> { }): Promise<string> {
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, ''); const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
if (!forPublic) { if (!forPublic) {
@@ -53,7 +55,7 @@ export default async function ({
} else { } else {
await executeCommand({ await executeCommand({
command: command:
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true `git clone -q -b ${branch} ${customUser}@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
} }
); );
} }
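
The SSH clone no longer hard-codes the git@ user: the new customUser field (defaulting to 'git' in getSource) is interpolated into the clone address, which matters for self-hosted servers that expose repositories under a different system user. Sketch of the assembled command, reusing the handler's own parameters:

// Illustrative assembly only; the values come from the function arguments above.
const cloneCommand =
	`git clone -q -b ${branch} ${customUser}@${url}:${repository}.git ` +
	`--config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ` +
	`${workdir}/`;
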


@@ -122,6 +122,9 @@ export async function cleanupUnconfiguredApplications(request: FastifyRequest<an
include: { settings: true, destinationDocker: true, teams: true } include: { settings: true, destinationDocker: true, teams: true }
}); });
for (const application of applications) { for (const application of applications) {
if (application?.buildPack === 'compose') {
continue;
}
if ( if (
!application.buildPack || !application.buildPack ||
!application.destinationDockerId || !application.destinationDockerId ||
@@ -500,14 +503,24 @@ export async function saveApplicationSettings(
projectId, projectId,
isBot, isBot,
isDBBranching, isDBBranching,
isCustomSSL isCustomSSL,
isHttp2
} = request.body; } = request.body;
await prisma.application.update({ await prisma.application.update({
where: { id }, where: { id },
data: { data: {
fqdn: isBot ? null : undefined, fqdn: isBot ? null : undefined,
settings: { settings: {
update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching, isCustomSSL } update: {
debug,
previews,
dualCerts,
autodeploy,
isBot,
isDBBranching,
isCustomSSL,
isHttp2
}
} }
}, },
include: { destinationDocker: true } include: { destinationDocker: true }
@@ -670,7 +683,7 @@ export async function restartApplication(
await executeCommand({ await executeCommand({
dockerId, dockerId,
command: `docker compose --project-directory ${workdir} up -d` command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
}); });
return reply.code(201).send(); return reply.code(201).send();
} }
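
Restart now names the compose file explicitly instead of relying on docker compose discovering it inside the project directory; restartPreview further down gets the same change. Equivalent invocation, assuming workdir contains the generated docker-compose.yml:

await executeCommand({
	dockerId,
	// -f pins the exact file; --project-directory keeps relative paths resolved against workdir
	command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
});
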
@@ -719,14 +732,15 @@ export async function deleteApplication(
) { ) {
try { try {
const { id } = request.params; const { id } = request.params;
const { force } = request.body;
const { teamId } = request.user; const { teamId } = request.user;
const application = await prisma.application.findUnique({ const application = await prisma.application.findUnique({
where: { id }, where: { id },
include: { destinationDocker: true } include: { destinationDocker: true, teams: true }
}); });
if (!force && application?.destinationDockerId && application.destinationDocker?.network) { if (!application.teams.find((team) => team.id === teamId) || teamId !== '0') {
throw { status: 403, message: 'You are not allowed to delete this application.' };
}
if (application?.destinationDocker?.id && application.destinationDocker?.network) {
const { stdout: containers } = await executeCommand({ const { stdout: containers } = await executeCommand({
dockerId: application.destinationDocker.id, dockerId: application.destinationDocker.id,
command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'` command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
@@ -746,6 +760,7 @@ export async function deleteApplication(
await prisma.secret.deleteMany({ where: { applicationId: id } }); await prisma.secret.deleteMany({ where: { applicationId: id } });
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } }); await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: id } }); await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: id } });
await prisma.previewApplication.deleteMany({ where: { applicationId: id } });
if (teamId === '0') { if (teamId === '0') {
await prisma.application.deleteMany({ where: { id } }); await prisma.application.deleteMany({ where: { id } });
} else { } else {
@@ -764,7 +779,9 @@ export async function checkDomain(request: FastifyRequest<CheckDomain>) {
fqdn, fqdn,
settings: { dualCerts } settings: { dualCerts }
} = await prisma.application.findUnique({ where: { id }, include: { settings: true } }); } = await prisma.application.findUnique({ where: { id }, include: { settings: true } });
return await checkDomainsIsValidInDNS({ hostname: domain, fqdn, dualCerts }); // TODO: Disabled this because it is having problems with remote docker engines.
// return await checkDomainsIsValidInDNS({ hostname: domain, fqdn, dualCerts });
return {};
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
} }
@@ -805,11 +822,12 @@ export async function checkDNS(request: FastifyRequest<CheckDNS>) {
remoteEngine, remoteEngine,
remoteIpAddress remoteIpAddress
}); });
if (isDNSCheckEnabled && !isDev && !forceSave) { // TODO: Disabled this because it is having problems with remote docker engines.
let hostname = request.hostname.split(':')[0]; // if (isDNSCheckEnabled && !isDev && !forceSave) {
if (remoteEngine) hostname = remoteIpAddress; // let hostname = request.hostname.split(':')[0];
return await checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }); // if (remoteEngine) hostname = remoteIpAddress;
} // return await checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts });
// }
return {}; return {};
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
@@ -842,15 +860,16 @@ export async function getDockerImages(request) {
try { try {
const { stdout } = await executeCommand({ const { stdout } = await executeCommand({
dockerId: application.destinationDocker.id, dockerId: application.destinationDocker.id,
command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}' | grep -i ${id} | grep -v cache`, command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}'`
shell: true
}); });
const { stdout: runningImage } = await executeCommand({ const { stdout: runningImage } = await executeCommand({
dockerId: application.destinationDocker.id, dockerId: application.destinationDocker.id,
command: `docker ps -a --filter 'label=com.docker.compose.service=${id}' --format {{.Image}}` command: `docker ps -a --filter 'label=com.docker.compose.service=${id}' --format {{.Image}}`
}); });
const images = stdout.trim().split('\n'); const images = stdout
.trim()
.split('\n')
.filter((image) => image.includes(id) && !image.includes('-cache'));
for (const image of images) { for (const image of images) {
const [repository, tag, createdAt] = image.split('#'); const [repository, tag, createdAt] = image.split('#');
if (tag.includes('-')) { if (tag.includes('-')) {
@@ -871,6 +890,7 @@ export async function getDockerImages(request) {
runningImage runningImage
}; };
} catch (error) { } catch (error) {
console.log(error);
return { return {
imagesAvailables imagesAvailables
}; };
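
getDockerImages now pulls the full docker images listing and filters it in JavaScript rather than piping through grep, keeping only entries that contain the application id and skipping the per-application '-cache' build images. A rough sketch, assuming stdout uses the Repository#Tag#CreatedAt format requested above:

const availableImages = stdout
	.trim()
	.split('\n')
	// keep this application's images; drop BuildKit cache images tagged '<id>-cache'
	.filter((image) => image.includes(id) && !image.includes('-cache'))
	.map((image) => {
		const [repository, tag, createdAt] = image.split('#');
		return { repository, tag, createdAt };
	});
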
@@ -1446,7 +1466,7 @@ export async function restartPreview(
await executeCommand({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` }); await executeCommand({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` });
await executeCommand({ await executeCommand({
dockerId, dockerId,
command: `docker compose --project-directory ${workdir} up -d` command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
}); });
return reply.code(201).send(); return reply.code(201).send();
} }
@@ -1600,12 +1620,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
.split('\n') .split('\n')
.map((l) => ansi(l)) .map((l) => ansi(l))
.filter((a) => a); .filter((a) => a);
const logs = stripLogsStderr.concat(stripLogsStdout); return { logs: stripLogsStderr.concat(stripLogsStdout) };
const sortedLogs = logs.sort((a, b) =>
day(a.split(' ')[0]).isAfter(day(b.split(' ')[0])) ? 1 : -1
);
return { logs: sortedLogs };
// }
} catch (error) { } catch (error) {
const { statusCode, stderr } = error; const { statusCode, stderr } = error;
if (stderr.startsWith('Error: No such container')) { if (stderr.startsWith('Error: No such container')) {


@@ -1,154 +1,170 @@
import type { OnlyId } from "../../../../types"; import type { OnlyId } from '../../../../types';
export interface SaveApplication extends OnlyId { export interface SaveApplication extends OnlyId {
Body: { Body: {
name: string, name: string;
buildPack: string, buildPack: string;
fqdn: string, fqdn: string;
port: number, port: number;
exposePort: number, exposePort: number;
installCommand: string, installCommand: string;
buildCommand: string, buildCommand: string;
startCommand: string, startCommand: string;
baseDirectory: string, baseDirectory: string;
publishDirectory: string, publishDirectory: string;
pythonWSGI: string, pythonWSGI: string;
pythonModule: string, pythonModule: string;
pythonVariable: string, pythonVariable: string;
dockerFileLocation: string, dockerFileLocation: string;
denoMainFile: string, denoMainFile: string;
denoOptions: string, denoOptions: string;
baseImage: string, baseImage: string;
gitCommitHash: string, gitCommitHash: string;
baseBuildImage: string, baseBuildImage: string;
deploymentType: string, deploymentType: string;
baseDatabaseBranch: string, baseDatabaseBranch: string;
dockerComposeFile: string, dockerComposeFile: string;
dockerComposeFileLocation: string, dockerComposeFileLocation: string;
dockerComposeConfiguration: string, dockerComposeConfiguration: string;
simpleDockerfile: string, simpleDockerfile: string;
dockerRegistryImageName: string dockerRegistryImageName: string;
} };
} }
export interface SaveApplicationSettings extends OnlyId { export interface SaveApplicationSettings extends OnlyId {
Querystring: { domain: string; }; Querystring: { domain: string };
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean, isCustomSSL: boolean }; Body: {
debug: boolean;
previews: boolean;
dualCerts: boolean;
autodeploy: boolean;
branch: string;
projectId: number;
isBot: boolean;
isDBBranching: boolean;
isCustomSSL: boolean;
isHttp2: boolean;
};
} }
export interface DeleteApplication extends OnlyId { export interface DeleteApplication extends OnlyId {
Querystring: { domain: string; }; Querystring: { domain: string };
Body: { force: boolean } Body: { force: boolean };
} }
export interface CheckDomain extends OnlyId { export interface CheckDomain extends OnlyId {
Querystring: { domain: string; }; Querystring: { domain: string };
} }
export interface CheckDNS extends OnlyId { export interface CheckDNS extends OnlyId {
Querystring: { domain: string; }; Querystring: { domain: string };
Body: { Body: {
exposePort: number, exposePort: number;
fqdn: string, fqdn: string;
forceSave: boolean, forceSave: boolean;
dualCerts: boolean dualCerts: boolean;
} };
} }
export interface DeployApplication { export interface DeployApplication {
Querystring: { domain: string } Querystring: { domain: string };
Body: { pullmergeRequestId: string | null, branch: string, forceRebuild?: boolean } Body: { pullmergeRequestId: string | null; branch: string; forceRebuild?: boolean };
} }
export interface GetImages { export interface GetImages {
Body: { buildPack: string, deploymentType: string } Body: { buildPack: string; deploymentType: string };
} }
export interface SaveApplicationSource extends OnlyId { export interface SaveApplicationSource extends OnlyId {
Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string, simpleDockerfile?: string } Body: {
gitSourceId?: string | null;
forPublic?: boolean;
type?: string;
simpleDockerfile?: string;
};
} }
export interface CheckRepository extends OnlyId { export interface CheckRepository extends OnlyId {
Querystring: { repository: string, branch: string } Querystring: { repository: string; branch: string };
} }
export interface SaveDestination extends OnlyId { export interface SaveDestination extends OnlyId {
Body: { destinationId: string } Body: { destinationId: string };
} }
export interface SaveSecret extends OnlyId { export interface SaveSecret extends OnlyId {
Body: { Body: {
name: string, name: string;
value: string, value: string;
isBuildSecret: boolean, isBuildSecret: boolean;
previewSecret: boolean, previewSecret: boolean;
isNew: boolean isNew: boolean;
} };
} }
export interface DeleteSecret extends OnlyId { export interface DeleteSecret extends OnlyId {
Body: { name: string } Body: { name: string };
} }
export interface SaveStorage extends OnlyId { export interface SaveStorage extends OnlyId {
Body: { Body: {
path: string, path: string;
newStorage: boolean, newStorage: boolean;
storageId: string storageId: string;
} };
} }
export interface DeleteStorage extends OnlyId { export interface DeleteStorage extends OnlyId {
Body: { Body: {
path: string, path: string;
} };
} }
export interface GetApplicationLogs { export interface GetApplicationLogs {
Params: { Params: {
id: string, id: string;
containerId: string containerId: string;
} };
Querystring: { Querystring: {
since: number, since: number;
} };
} }
export interface GetBuilds extends OnlyId { export interface GetBuilds extends OnlyId {
Querystring: { Querystring: {
buildId: string buildId: string;
skip: number, skip: number;
} };
} }
export interface GetBuildIdLogs { export interface GetBuildIdLogs {
Params: { Params: {
id: string, id: string;
buildId: string buildId: string;
}, };
Querystring: { Querystring: {
sequence: number sequence: number;
} };
} }
export interface SaveDeployKey extends OnlyId { export interface SaveDeployKey extends OnlyId {
Body: { Body: {
deployKeyId: number deployKeyId: number;
} };
} }
export interface CancelDeployment { export interface CancelDeployment {
Body: { Body: {
buildId: string, buildId: string;
applicationId: string applicationId: string;
} };
} }
export interface DeployApplication extends OnlyId { export interface DeployApplication extends OnlyId {
Body: { Body: {
pullmergeRequestId: string | null, pullmergeRequestId: string | null;
branch: string, branch: string;
forceRebuild?: boolean forceRebuild?: boolean;
} };
} }
export interface StopPreviewApplication extends OnlyId { export interface StopPreviewApplication extends OnlyId {
Body: { Body: {
pullmergeRequestId: string | null, pullmergeRequestId: string | null;
} };
} }
export interface RestartPreviewApplication { export interface RestartPreviewApplication {
Params: { Params: {
id: string, id: string;
pullmergeRequestId: string | null, pullmergeRequestId: string | null;
} };
} }
export interface RestartApplication { export interface RestartApplication {
Params: { Params: {
id: string, id: string;
}, };
Body: { Body: {
imageId: string | null, imageId: string | null;
} };
} }

File diff suppressed because it is too large


@@ -4,7 +4,7 @@ export interface SaveDatabaseType extends OnlyId {
Body: { type: string } Body: { type: string }
} }
export interface DeleteDatabase extends OnlyId { export interface DeleteDatabase extends OnlyId {
Body: { force: string } Body: { }
} }
export interface SaveVersion extends OnlyId { export interface SaveVersion extends OnlyId {
Body: { Body: {


@@ -1,6 +1,6 @@
import { compareVersions } from "compare-versions"; import { compareVersions } from 'compare-versions';
import cuid from "cuid"; import cuid from 'cuid';
import bcrypt from "bcryptjs"; import bcrypt from 'bcryptjs';
import fs from 'fs/promises'; import fs from 'fs/promises';
import yaml from 'js-yaml'; import yaml from 'js-yaml';
import { import {
@@ -13,12 +13,12 @@ import {
uniqueName, uniqueName,
version, version,
sentryDSN, sentryDSN,
executeCommand, executeCommand
} from "../../../lib/common"; } from '../../../lib/common';
import { scheduler } from "../../../lib/scheduler"; import { scheduler } from '../../../lib/scheduler';
import type { FastifyReply, FastifyRequest } from "fastify"; import type { FastifyReply, FastifyRequest } from 'fastify';
import type { Login, Update } from "."; import type { Login, Update } from '.';
import type { GetCurrentUser } from "./types"; import type { GetCurrentUser } from './types';
export async function hashPassword(password: string): Promise<string> { export async function hashPassword(password: string): Promise<string> {
const saltRounds = 15; const saltRounds = 15;
@@ -29,9 +29,9 @@ export async function backup(request: FastifyRequest) {
try { try {
const { backupData } = request.params; const { backupData } = request.params;
let std = null; let std = null;
const [id, backupType, type, zipped, storage] = backupData.split(':') const [id, backupType, type, zipped, storage] = backupData.split(':');
console.log(id, backupType, type, zipped, storage) console.log(id, backupType, type, zipped, storage);
const database = await prisma.database.findUnique({ where: { id } }) const database = await prisma.database.findUnique({ where: { id } });
if (database) { if (database) {
// await executeDockerCmd({ // await executeDockerCmd({
// dockerId: database.destinationDockerId, // dockerId: database.destinationDockerId,
@@ -40,8 +40,7 @@ export async function backup(request: FastifyRequest) {
std = await executeCommand({ std = await executeCommand({
dockerId: database.destinationDockerId, dockerId: database.destinationDockerId,
command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup` command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
}) });
} }
if (std.stdout) { if (std.stdout) {
return std.stdout; return std.stdout;
@@ -58,9 +57,9 @@ export async function cleanupManually(request: FastifyRequest) {
try { try {
const { serverId } = request.body; const { serverId } = request.body;
const destination = await prisma.destinationDocker.findUnique({ const destination = await prisma.destinationDocker.findUnique({
where: { id: serverId }, where: { id: serverId }
}); });
await cleanupDockerStorage(destination.id, true, true); await cleanupDockerStorage(destination.id);
return {}; return {};
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
@@ -68,17 +67,25 @@ export async function cleanupManually(request: FastifyRequest) {
} }
export async function refreshTags() { export async function refreshTags() {
try { try {
const { default: got } = await import('got') const { default: got } = await import('got');
try { try {
if (isDev) { if (isDev) {
const tags = await fs.readFile('./devTags.json', 'utf8') let tags = await fs.readFile('./devTags.json', 'utf8');
await fs.writeFile('./tags.json', tags) try {
if (await fs.stat('./testTags.json')) {
const testTags = await fs.readFile('./testTags.json', 'utf8');
if (testTags.length > 0) {
tags = JSON.parse(tags).concat(JSON.parse(testTags));
}
}
} catch (error) {}
await fs.writeFile('./tags.json', tags);
} else { } else {
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text() const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text();
await fs.writeFile('/app/tags.json', tags) await fs.writeFile('/app/tags.json', tags);
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
return {}; return {};
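
In dev mode, refreshTags now merges an optional testTags.json into devTags.json before writing tags.json, so service tags under test appear locally without editing the published list (refreshTemplates below does the same with testTemplate.yaml). A compact sketch of that merge; JSON.stringify is added here for clarity when writing the combined array:

let tags = await fs.readFile('./devTags.json', 'utf8');
try {
	// fs.stat throws when testTags.json is absent, so the merge is skipped silently.
	if (await fs.stat('./testTags.json')) {
		const testTags = await fs.readFile('./testTags.json', 'utf8');
		if (testTags.length > 0) {
			tags = JSON.stringify(JSON.parse(tags).concat(JSON.parse(testTags)));
		}
	}
} catch (error) {}
await fs.writeFile('./tags.json', tags);
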
@@ -88,17 +95,25 @@ export async function refreshTags() {
} }
export async function refreshTemplates() { export async function refreshTemplates() {
try { try {
const { default: got } = await import('got') const { default: got } = await import('got');
try { try {
if (isDev) { if (isDev) {
const response = await fs.readFile('./devTemplates.yaml', 'utf8') let templates = await fs.readFile('./devTemplates.yaml', 'utf8');
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(response))) try {
if (await fs.stat('./testTemplate.yaml')) {
templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8'));
}
} catch (error) {}
const response = await fs.readFile('./devTemplates.yaml', 'utf8');
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(response)));
} else { } else {
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text() const response = await got
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response))) .get('https://get.coollabs.io/coolify/service-templates.yaml')
.text();
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)));
} }
} catch (error) { } catch (error) {
console.log(error) console.log(error);
} }
return {}; return {};
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -107,28 +122,29 @@ export async function refreshTemplates() {
} }
export async function checkUpdate(request: FastifyRequest) { export async function checkUpdate(request: FastifyRequest) {
try { try {
const { default: got } = await import('got') const { default: got } = await import('got');
const isStaging = const isStaging =
request.hostname === "staging.coolify.io" || request.hostname === 'staging.coolify.io' || request.hostname === 'arm.coolify.io';
request.hostname === "arm.coolify.io";
const currentVersion = version; const currentVersion = version;
const { coolify } = await got.get('https://get.coollabs.io/versions.json', { const { coolify } = await got
searchParams: { .get('https://get.coollabs.io/versions.json', {
appId: process.env['COOLIFY_APP_ID'] || undefined, searchParams: {
version: currentVersion appId: process.env['COOLIFY_APP_ID'] || undefined,
} version: currentVersion
}).json() }
})
.json();
const latestVersion = coolify.main.version; const latestVersion = coolify.main.version;
const isUpdateAvailable = compareVersions(latestVersion, currentVersion); const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
if (isStaging) { if (isStaging) {
return { return {
isUpdateAvailable: true, isUpdateAvailable: true,
latestVersion: "next", latestVersion: 'next'
}; };
} }
return { return {
isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1, isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1,
latestVersion, latestVersion
}; };
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
@@ -142,8 +158,13 @@ export async function update(request: FastifyRequest<Update>) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst(); const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` }); await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` }); await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` }); await executeCommand({
await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` }); command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
return {}; return {};
} else { } else {
await asyncSleep(2000); await asyncSleep(2000);
@@ -156,12 +177,12 @@ export async function update(request: FastifyRequest<Update>) {
export async function resetQueue(request: FastifyRequest<any>) { export async function resetQueue(request: FastifyRequest<any>) {
try { try {
const teamId = request.user.teamId; const teamId = request.user.teamId;
if (teamId === "0") { if (teamId === '0') {
await prisma.build.updateMany({ await prisma.build.updateMany({
where: { status: { in: ["queued", "running"] } }, where: { status: { in: ['queued', 'running'] } },
data: { status: "canceled" }, data: { status: 'canceled' }
}); });
scheduler.workers.get("deployApplication").postMessage("cancel"); scheduler.workers.get('deployApplication').postMessage('cancel');
} }
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
@@ -170,7 +191,7 @@ export async function resetQueue(request: FastifyRequest<any>) {
export async function restartCoolify(request: FastifyRequest<any>) { export async function restartCoolify(request: FastifyRequest<any>) {
try { try {
const teamId = request.user.teamId; const teamId = request.user.teamId;
if (teamId === "0") { if (teamId === '0') {
if (!isDev) { if (!isDev) {
await executeCommand({ command: `docker restart coolify` }); await executeCommand({ command: `docker restart coolify` });
return {}; return {};
@@ -180,7 +201,7 @@ export async function restartCoolify(request: FastifyRequest<any>) {
} }
throw { throw {
status: 500, status: 500,
message: "You are not authorized to restart Coolify.", message: 'You are not authorized to restart Coolify.'
}; };
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
@@ -192,43 +213,52 @@ export async function showDashboard(request: FastifyRequest) {
const userId = request.user.userId; const userId = request.user.userId;
const teamId = request.user.teamId; const teamId = request.user.teamId;
let applications = await prisma.application.findMany({ let applications = await prisma.application.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } }, where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }, include: { settings: true, destinationDocker: true, teams: true }
}); });
const databases = await prisma.database.findMany({ const databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } }, where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }, include: { settings: true, destinationDocker: true, teams: true }
}); });
const services = await prisma.service.findMany({ const services = await prisma.service.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } }, where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, teams: true }, include: { destinationDocker: true, teams: true }
}); });
const gitSources = await prisma.gitSource.findMany({ const gitSources = await prisma.gitSource.findMany({
where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] }, where: {
include: { teams: true }, OR: [
{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
{ isSystemWide: true }
]
},
include: { teams: true }
}); });
const destinations = await prisma.destinationDocker.findMany({ const destinations = await prisma.destinationDocker.findMany({
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } }, where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { teams: true }, include: { teams: true }
}); });
const settings = await listSettings(); const settings = await listSettings();
let foundUnconfiguredApplication = false; let foundUnconfiguredApplication = false;
for (const application of applications) { for (const application of applications) {
if (((!application.buildPack || !application.branch) && !application.simpleDockerfile) || !application.destinationDockerId || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") { if (
foundUnconfiguredApplication = true ((!application.buildPack || !application.branch) && !application.simpleDockerfile) ||
!application.destinationDockerId ||
(!application.settings?.isBot && !application?.fqdn && application.buildPack !== 'compose')
) {
foundUnconfiguredApplication = true;
} }
} }
let foundUnconfiguredService = false; let foundUnconfiguredService = false;
for (const service of services) { for (const service of services) {
if (!service.fqdn) { if (!service.fqdn) {
foundUnconfiguredService = true foundUnconfiguredService = true;
} }
} }
let foundUnconfiguredDatabase = false; let foundUnconfiguredDatabase = false;
for (const database of databases) { for (const database of databases) {
if (!database.version) { if (!database.version) {
foundUnconfiguredDatabase = true foundUnconfiguredDatabase = true;
} }
} }
return { return {
@@ -240,101 +270,94 @@ export async function showDashboard(request: FastifyRequest) {
services, services,
gitSources, gitSources,
destinations, destinations,
settings, settings
}; };
} catch ({ status, message }) { } catch ({ status, message }) {
return errorHandler({ status, message }); return errorHandler({ status, message });
} }
} }
export async function login( export async function login(request: FastifyRequest<Login>, reply: FastifyReply) {
request: FastifyRequest<Login>,
reply: FastifyReply
) {
if (request.user) { if (request.user) {
return reply.redirect("/dashboard"); return reply.redirect('/dashboard');
} else { } else {
const { email, password, isLogin } = request.body || {}; const { email, password, isLogin } = request.body || {};
if (!email || !password) { if (!email || !password) {
throw { status: 500, message: "Email and password are required." }; throw { status: 500, message: 'Email and password are required.' };
} }
const users = await prisma.user.count(); const users = await prisma.user.count();
const userFound = await prisma.user.findUnique({ const userFound = await prisma.user.findUnique({
where: { email }, where: { email },
include: { teams: true, permission: true }, include: { teams: true, permission: true },
rejectOnNotFound: false, rejectOnNotFound: false
}); });
if (!userFound && isLogin) { if (!userFound && isLogin) {
throw { status: 500, message: "User not found." }; throw { status: 500, message: 'User not found.' };
} }
const { isRegistrationEnabled, id } = await prisma.setting.findFirst(); const { isRegistrationEnabled, id } = await prisma.setting.findFirst();
let uid = cuid(); let uid = cuid();
let permission = "read"; let permission = 'read';
let isAdmin = false; let isAdmin = false;
if (users === 0) { if (users === 0) {
await prisma.setting.update({ await prisma.setting.update({
where: { id }, where: { id },
data: { isRegistrationEnabled: false }, data: { isRegistrationEnabled: false }
}); });
uid = "0"; uid = '0';
} }
if (userFound) { if (userFound) {
if (userFound.type === "email") { if (userFound.type === 'email') {
if (userFound.password === "RESETME") { if (userFound.password === 'RESETME') {
const hashedPassword = await hashPassword(password); const hashedPassword = await hashPassword(password);
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) { if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
if (userFound.id === "0") { if (userFound.id === '0') {
await prisma.user.update({ await prisma.user.update({
where: { email: userFound.email }, where: { email: userFound.email },
data: { password: "RESETME" }, data: { password: 'RESETME' }
}); });
} else { } else {
await prisma.user.update({ await prisma.user.update({
where: { email: userFound.email }, where: { email: userFound.email },
data: { password: "RESETTIMEOUT" }, data: { password: 'RESETTIMEOUT' }
}); });
} }
throw { throw {
status: 500, status: 500,
message: message: 'Password reset link has expired. Please request a new one.'
"Password reset link has expired. Please request a new one.",
}; };
} else { } else {
await prisma.user.update({ await prisma.user.update({
where: { email: userFound.email }, where: { email: userFound.email },
data: { password: hashedPassword }, data: { password: hashedPassword }
}); });
return { return {
userId: userFound.id, userId: userFound.id,
teamId: userFound.id, teamId: userFound.id,
permission: userFound.permission, permission: userFound.permission,
isAdmin: true, isAdmin: true
}; };
} }
} }
const passwordMatch = await bcrypt.compare( const passwordMatch = await bcrypt.compare(password, userFound.password);
password,
userFound.password
);
if (!passwordMatch) { if (!passwordMatch) {
throw { throw {
status: 500, status: 500,
message: "Wrong password or email address.", message: 'Wrong password or email address.'
}; };
} }
uid = userFound.id; uid = userFound.id;
isAdmin = true; isAdmin = true;
} }
} else { } else {
permission = "owner"; permission = 'owner';
isAdmin = true; isAdmin = true;
if (!isRegistrationEnabled) { if (!isRegistrationEnabled) {
throw { throw {
status: 404, status: 404,
message: "Registration disabled by administrator.", message: 'Registration disabled by administrator.'
}; };
} }
const hashedPassword = await hashPassword(password); const hashedPassword = await hashPassword(password);
@@ -344,17 +367,17 @@ export async function login(
id: uid, id: uid,
email, email,
password: hashedPassword, password: hashedPassword,
type: "email", type: 'email',
teams: { teams: {
create: { create: {
id: uid, id: uid,
name: uniqueName(), name: uniqueName(),
destinationDocker: { connect: { network: "coolify" } }, destinationDocker: { connect: { network: 'coolify' } }
}, }
}, },
permission: { create: { teamId: uid, permission: "owner" } }, permission: { create: { teamId: uid, permission: 'owner' } }
}, },
include: { teams: true }, include: { teams: true }
}); });
} else { } else {
await prisma.user.create({ await prisma.user.create({
@@ -362,16 +385,16 @@ export async function login(
id: uid, id: uid,
email, email,
password: hashedPassword, password: hashedPassword,
type: "email", type: 'email',
teams: { teams: {
create: { create: {
id: uid, id: uid,
name: uniqueName(), name: uniqueName()
}, }
}, },
permission: { create: { teamId: uid, permission: "owner" } }, permission: { create: { teamId: uid, permission: 'owner' } }
}, },
include: { teams: true }, include: { teams: true }
}); });
} }
} }
@@ -379,23 +402,20 @@ export async function login(
userId: uid, userId: uid,
teamId: uid, teamId: uid,
permission, permission,
isAdmin, isAdmin
}; };
} }
} }
export async function getCurrentUser( export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fastify) {
request: FastifyRequest<GetCurrentUser>,
fastify
) {
let token = null; let token = null;
const { teamId } = request.query; const { teamId } = request.query;
try { try {
const user = await prisma.user.findUnique({ const user = await prisma.user.findUnique({
where: { id: request.user.userId }, where: { id: request.user.userId }
}); });
if (!user) { if (!user) {
throw "User not found"; throw 'User not found';
} }
} catch (error) { } catch (error) {
throw { status: 401, message: error }; throw { status: 401, message: error };
@@ -404,17 +424,15 @@ export async function getCurrentUser(
try { try {
const user = await prisma.user.findFirst({ const user = await prisma.user.findFirst({
where: { id: request.user.userId, teams: { some: { id: teamId } } }, where: { id: request.user.userId, teams: { some: { id: teamId } } },
include: { teams: true, permission: true }, include: { teams: true, permission: true }
}); });
if (user) { if (user) {
const permission = user.permission.find( const permission = user.permission.find((p) => p.teamId === teamId).permission;
(p) => p.teamId === teamId
).permission;
const payload = { const payload = {
...request.user, ...request.user,
teamId, teamId,
permission: permission || null, permission: permission || null,
isAdmin: permission === "owner" || permission === "admin", isAdmin: permission === 'owner' || permission === 'admin'
}; };
token = fastify.jwt.sign(payload); token = fastify.jwt.sign(payload);
} }
@@ -422,12 +440,14 @@ export async function getCurrentUser(
// No new token -> not switching teams // No new token -> not switching teams
} }
} }
const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } }) const pendingInvitations = await prisma.teamInvitation.findMany({
where: { uid: request.user.userId }
});
return { return {
settings: await prisma.setting.findUnique({ where: { id: "0" } }), settings: await prisma.setting.findUnique({ where: { id: '0' } }),
sentryDSN, sentryDSN,
pendingInvitations, pendingInvitations,
token, token,
...request.user, ...request.user
}; };
} }

File diff suppressed because it is too large


@@ -22,11 +22,11 @@ export async function listSources(request: FastifyRequest) {
export async function saveSource(request, reply) { export async function saveSource(request, reply) {
try { try {
const { id } = request.params const { id } = request.params
let { name, htmlUrl, apiUrl, customPort, isSystemWide } = request.body let { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = request.body
if (customPort) customPort = Number(customPort) if (customPort) customPort = Number(customPort)
await prisma.gitSource.update({ await prisma.gitSource.update({
where: { id }, where: { id },
data: { name, htmlUrl, apiUrl, customPort, isSystemWide } data: { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide }
}); });
return reply.code(201).send() return reply.code(201).send()
} catch ({ status, message }) { } catch ({ status, message }) {
@@ -48,6 +48,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
apiUrl: null, apiUrl: null,
organization: null, organization: null,
customPort: 22, customPort: 22,
customUser: 'git',
}, },
settings settings
} }
@@ -133,7 +134,7 @@ export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>
try { try {
const { id } = request.params const { id } = request.params
const { teamId } = request.user const { teamId } = request.user
let { type, name, htmlUrl, apiUrl, oauthId, appId, appSecret, groupName, customPort } = let { type, name, htmlUrl, apiUrl, oauthId, appId, appSecret, groupName, customPort, customUser } =
request.body request.body
if (oauthId) oauthId = Number(oauthId); if (oauthId) oauthId = Number(oauthId);
@@ -142,7 +143,7 @@ export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>
if (id === 'new') { if (id === 'new') {
const newId = cuid() const newId = cuid()
await prisma.gitSource.create({ data: { id: newId, type, apiUrl, htmlUrl, name, customPort, teams: { connect: { id: teamId } } } }); await prisma.gitSource.create({ data: { id: newId, type, apiUrl, htmlUrl, name, customPort, customUser, teams: { connect: { id: teamId } } } });
await prisma.gitlabApp.create({ await prisma.gitlabApp.create({
data: { data: {
teams: { connect: { id: teamId } }, teams: { connect: { id: teamId } },
@@ -158,7 +159,7 @@ export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>
id: newId id: newId
} }
} else { } else {
await prisma.gitSource.update({ where: { id }, data: { type, apiUrl, htmlUrl, name, customPort } }); await prisma.gitSource.update({ where: { id }, data: { type, apiUrl, htmlUrl, name, customPort, customUser } });
await prisma.gitlabApp.update({ await prisma.gitlabApp.update({
where: { id }, where: { id },
data: { data: {


@@ -21,6 +21,7 @@ export interface SaveGitLabSource extends OnlyId {
appSecret: string, appSecret: string,
groupName: string, groupName: string,
customPort: number, customPort: number,
customUser: string,
} }
} }
export interface CheckGitLabOAuthId extends OnlyId { export interface CheckGitLabOAuthId extends OnlyId {


@@ -1,9 +1,32 @@
import { FastifyRequest } from "fastify"; import { FastifyRequest } from 'fastify';
import { errorHandler, getDomain, isDev, prisma, executeCommand } from "../../../lib/common"; import { errorHandler, getDomain, isDev, prisma, executeCommand } from '../../../lib/common';
import { getTemplates } from "../../../lib/services"; import { getTemplates } from '../../../lib/services';
import { OnlyId } from "../../../types"; import { OnlyId } from '../../../types';
import { parseAndFindServiceTemplates } from '../../api/v1/services/handlers';
function generateServices(serviceId, containerId, port) { function generateServices(serviceId, containerId, port, isHttp2 = false, isHttps = false) {
if (isHttp2) {
return {
[serviceId]: {
loadbalancer: {
servers: [
{
url: `${isHttps ? 'https' : 'http'}://${containerId}:${port}`
}
]
}
},
[`${serviceId}-http2`]: {
loadbalancer: {
servers: [
{
url: `h2c://${containerId}:${port}`
}
]
}
}
};
}
return { return {
[serviceId]: { [serviceId]: {
loadbalancer: { loadbalancer: {
@@ -14,43 +37,57 @@ function generateServices(serviceId, containerId, port) {
] ]
} }
} }
} };
} }
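
With isHttp2 enabled, generateServices returns two Traefik backends for the same container: the usual http/https loadbalancer plus an h2c (HTTP/2 cleartext) one that the gRPC routers point at. Roughly, a hypothetical call generateServices('app-3000', 'app', 3000, true, false) yields:

const services = {
	'app-3000': {
		loadbalancer: { servers: [{ url: 'http://app:3000' }] }
	},
	'app-3000-http2': {
		// h2c carries HTTP/2 without TLS between Traefik and the container, as gRPC expects
		loadbalancer: { servers: [{ url: 'h2c://app:3000' }] }
	}
};
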
function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, isDualCerts, isCustomSSL) { function generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
isDualCerts,
isCustomSSL,
isHttp2 = false
) {
let rule = `Host(\`${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`;
let ruleWWW = `Host(\`www.${nakedDomain}\`)${
pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''
}`;
let http: any = { let http: any = {
entrypoints: ['web'], entrypoints: ['web'],
rule: `Host(\`${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`, rule,
service: `${serviceId}`, service: `${serviceId}`,
priority: 2, priority: 2,
middlewares: [] middlewares: []
} };
let https: any = { let https: any = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`, rule,
service: `${serviceId}`, service: `${serviceId}`,
priority: 2, priority: 2,
tls: { tls: {
certresolver: 'letsencrypt' certresolver: 'letsencrypt'
}, },
middlewares: [] middlewares: []
} };
let httpWWW: any = { let httpWWW: any = {
entrypoints: ['web'], entrypoints: ['web'],
rule: `Host(\`www.${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`, rule: ruleWWW,
service: `${serviceId}`, service: `${serviceId}`,
priority: 2, priority: 2,
middlewares: [] middlewares: []
} };
let httpsWWW: any = { let httpsWWW: any = {
entrypoints: ['websecure'], entrypoints: ['websecure'],
rule: `Host(\`www.${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`, rule: ruleWWW,
service: `${serviceId}`, service: `${serviceId}`,
priority: 2, priority: 2,
tls: { tls: {
certresolver: 'letsencrypt' certresolver: 'letsencrypt'
}, },
middlewares: [] middlewares: []
} };
// 2. http + non-www only // 2. http + non-www only
if (!isHttps && !isWWW) { if (!isHttps && !isWWW) {
https.middlewares.push('redirect-to-http'); https.middlewares.push('redirect-to-http');
@@ -58,19 +95,19 @@ function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, is
httpWWW.middlewares.push('redirect-to-non-www'); httpWWW.middlewares.push('redirect-to-non-www');
httpsWWW.middlewares.push('redirect-to-non-www'); httpsWWW.middlewares.push('redirect-to-non-www');
delete https.tls delete https.tls;
delete httpsWWW.tls delete httpsWWW.tls;
} }
// 3. http + www only // 3. http + www only
if (!isHttps && isWWW) { if (!isHttps && isWWW) {
https.middlewares.push('redirect-to-http'); https.middlewares.push('redirect-to-http');
httpsWWW.middlewares.push('redirect-to-http'); httpsWWW.middlewares.push('redirect-to-http');
http.middlewares.push('redirect-to-www'); http.middlewares.push('redirect-to-www');
https.middlewares.push('redirect-to-www'); https.middlewares.push('redirect-to-www');
delete https.tls delete https.tls;
delete httpsWWW.tls delete httpsWWW.tls;
} }
// 5. https + non-www only // 5. https + non-www only
if (isHttps && !isWWW) { if (isHttps && !isWWW) {
@@ -86,17 +123,17 @@ function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, is
httpsWWW.tls = true; httpsWWW.tls = true;
} else { } else {
https.tls = true; https.tls = true;
delete httpsWWW.tls.certresolver delete httpsWWW.tls.certresolver;
httpsWWW.tls.domains = { httpsWWW.tls.domains = {
main: domain main: domain
} };
} }
} else { } else {
if (!isDualCerts) { if (!isDualCerts) {
delete httpsWWW.tls.certresolver delete httpsWWW.tls.certresolver;
httpsWWW.tls.domains = { httpsWWW.tls.domains = {
main: domain main: domain
} };
} }
} }
} }
@@ -114,26 +151,59 @@ function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, is
httpsWWW.tls = true; httpsWWW.tls = true;
} else { } else {
httpsWWW.tls = true; httpsWWW.tls = true;
delete https.tls.certresolver delete https.tls.certresolver;
https.tls.domains = { https.tls.domains = {
main: domain main: domain
} };
} }
} else { } else {
if (!isDualCerts) { if (!isDualCerts) {
delete https.tls.certresolver delete https.tls.certresolver;
https.tls.domains = { https.tls.domains = {
main: domain main: domain
} };
} }
} }
} }
if (isHttp2) {
let http2 = {
...http,
service: `${serviceId}-http2`,
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
};
let http2WWW = {
...httpWWW,
service: `${serviceId}-http2`,
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
};
let https2 = {
...https,
service: `${serviceId}-http2`,
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
};
let https2WWW = {
...httpsWWW,
service: `${serviceId}-http2`,
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
};
return {
[`${serviceId}-${pathPrefix}`]: { ...http },
[`${serviceId}-${pathPrefix}-http2`]: { ...http2 },
[`${serviceId}-${pathPrefix}-secure`]: { ...https },
[`${serviceId}-${pathPrefix}-secure-http2`]: { ...https2 },
[`${serviceId}-${pathPrefix}-www`]: { ...httpWWW },
[`${serviceId}-${pathPrefix}-www-http2`]: { ...http2WWW },
[`${serviceId}-${pathPrefix}-secure-www`]: { ...httpsWWW },
[`${serviceId}-${pathPrefix}-secure-www-http2`]: { ...https2WWW }
};
}
return { return {
[`${serviceId}-${pathPrefix}`]: { ...http }, [`${serviceId}-${pathPrefix}`]: { ...http },
[`${serviceId}-${pathPrefix}-secure`]: { ...https }, [`${serviceId}-${pathPrefix}-secure`]: { ...https },
[`${serviceId}-${pathPrefix}-www`]: { ...httpWWW }, [`${serviceId}-${pathPrefix}-www`]: { ...httpWWW },
[`${serviceId}-${pathPrefix}-secure-www`]: { ...httpsWWW }, [`${serviceId}-${pathPrefix}-secure-www`]: { ...httpsWWW }
} };
} }
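
When isHttp2 is set, generateRouters duplicates each router: the original host rule keeps serving regular HTTP, while a sibling router adds a HeadersRegexp match on Content-Type application/grpc* and targets the -http2 service, so gRPC calls to the same domain land on the h2c backend. The gRPC variant of the https router, as built above:

const https2 = {
	...https,
	service: `${serviceId}-http2`,
	// gRPC requests send Content-Type: application/grpc[+proto|+json], hence the prefix match
	rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
};
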
export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote: boolean = false) { export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote: boolean = false) {
const traefik = { const traefik = {
@@ -174,26 +244,26 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
const coolifySettings = await prisma.setting.findFirst(); const coolifySettings = await prisma.setting.findFirst();
if (coolifySettings.isTraefikUsed && coolifySettings.proxyDefaultRedirect) { if (coolifySettings.isTraefikUsed && coolifySettings.proxyDefaultRedirect) {
traefik.http.routers['catchall-http'] = { traefik.http.routers['catchall-http'] = {
entrypoints: ["web"], entrypoints: ['web'],
rule: "HostRegexp(`{catchall:.*}`)", rule: 'HostRegexp(`{catchall:.*}`)',
service: "noop", service: 'noop',
priority: 1, priority: 1,
middlewares: ["redirect-regexp"] middlewares: ['redirect-regexp']
} };
traefik.http.routers['catchall-https'] = { traefik.http.routers['catchall-https'] = {
entrypoints: ["websecure"], entrypoints: ['websecure'],
rule: "HostRegexp(`{catchall:.*}`)", rule: 'HostRegexp(`{catchall:.*}`)',
service: "noop", service: 'noop',
priority: 1, priority: 1,
middlewares: ["redirect-regexp"] middlewares: ['redirect-regexp']
} };
traefik.http.middlewares['redirect-regexp'] = { traefik.http.middlewares['redirect-regexp'] = {
redirectregex: { redirectregex: {
regex: '(.*)', regex: '(.*)',
replacement: coolifySettings.proxyDefaultRedirect, replacement: coolifySettings.proxyDefaultRedirect,
permanent: false permanent: false
} }
} };
traefik.http.services['noop'] = { traefik.http.services['noop'] = {
loadBalancer: { loadBalancer: {
servers: [ servers: [
@@ -202,25 +272,41 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
} }
] ]
} }
} };
} }
const sslpath = '/etc/traefik/acme/custom'; const sslpath = '/etc/traefik/acme/custom';
let certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } } } } }) let certificates = await prisma.certificate.findMany({
where: {
team: {
applications: { some: { settings: { isCustomSSL: true } } },
destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } }
}
}
});
if (remote) { if (remote) {
certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true } } } } }) certificates = await prisma.certificate.findMany({
where: {
team: {
applications: { some: { settings: { isCustomSSL: true } } },
destinationDocker: {
some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true }
}
}
}
});
} }
let parsedCertificates = [] let parsedCertificates = [];
for (const certificate of certificates) { for (const certificate of certificates) {
parsedCertificates.push({ parsedCertificates.push({
certFile: `${sslpath}/${certificate.id}-cert.pem`, certFile: `${sslpath}/${certificate.id}-cert.pem`,
keyFile: `${sslpath}/${certificate.id}-key.pem` keyFile: `${sslpath}/${certificate.id}-key.pem`
}) });
} }
if (parsedCertificates.length > 0) { if (parsedCertificates.length > 0) {
traefik.tls.certificates = parsedCertificates traefik.tls.certificates = parsedCertificates;
} }
let applications = []; let applications = [];
@@ -236,7 +322,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
destinationDocker: true, destinationDocker: true,
persistentStorage: true, persistentStorage: true,
serviceSecret: true, serviceSecret: true,
serviceSetting: true, serviceSetting: true
}, },
orderBy: { createdAt: 'desc' } orderBy: { createdAt: 'desc' }
}); });
@@ -251,23 +337,25 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
destinationDocker: true, destinationDocker: true,
persistentStorage: true, persistentStorage: true,
serviceSecret: true, serviceSecret: true,
serviceSetting: true, serviceSetting: true
}, },
orderBy: { createdAt: 'desc' }, orderBy: { createdAt: 'desc' }
}); });
} }
if (applications.length > 0) { if (applications.length > 0) {
const dockerIds = new Set() const dockerIds = new Set();
const runningContainers = {} const runningContainers = {};
applications.forEach((app) => dockerIds.add(app.destinationDocker.id)); applications.forEach((app) => dockerIds.add(app.destinationDocker.id));
for (const dockerId of dockerIds) { for (const dockerId of dockerIds) {
const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` }) const { stdout: container } = await executeCommand({
dockerId,
command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'`
});
if (container) { if (container) {
const containersArray = container.trim().split('\n'); const containersArray = container.trim().split('\n');
if (containersArray.length > 0) { if (containersArray.length > 0) {
runningContainers[dockerId] = containersArray runningContainers[dockerId] = containersArray;
} }
} }
} }
@@ -289,38 +377,54 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
if (
!runningContainers[destinationDockerId] ||
runningContainers[destinationDockerId].length === 0 ||
runningContainers[destinationDockerId].filter((container) => container.startsWith(id))
.length === 0
) {
continue;
}
if (buildPack === 'compose') {
const services = Object.entries(JSON.parse(dockerComposeConfiguration));
if (services.length > 0) {
for (const service of services) {
const [key, value] = service;
if (key && value) {
if (!value.fqdn || !value.port) {
continue;
}
const { fqdn, port } = value;
const containerId = `${id}-${key}`;
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const pathPrefix = '/';
const isCustomSSL = false;
const dualCerts = false;
const serviceId = `${id}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, containerId, port)
};
}
}
}
continue;
}
const { previews, dualCerts, isCustomSSL, isHttp2 } = settings;
const { network, id: dockerId } = destinationDocker;
if (!fqdn) {
continue;
@@ -329,12 +433,31 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const pathPrefix = '/';
const serviceId = `${id}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL,
isHttp2
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, id, port, isHttp2, isHttps)
};
if (previews) {
const { stdout } = await executeCommand({
dockerId,
command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"`
});
if (stdout) {
const containers = stdout
.trim()
@@ -343,44 +466,57 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
.map((c) => c.replace(/"/g, ''));
if (containers.length > 0) {
for (const container of containers) {
const previewDomain = `${container.split('-')[1]}${
coolifySettings.previewSeparator
}${domain}`;
const nakedDomain = previewDomain.replace(/^www\./, '');
const pathPrefix = '/';
const serviceId = `${container}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
previewDomain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, container, port, isHttp2)
};
}
}
}
}
} catch (error) {
console.log(error);
}
}
}
if (services.length > 0) {
const dockerIds = new Set();
const runningContainers = {};
services.forEach((app) => dockerIds.add(app.destinationDocker.id));
for (const dockerId of dockerIds) {
const { stdout: container } = await executeCommand({
dockerId,
command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'`
});
if (container) {
const containersArray = container.trim().split('\n');
if (containersArray.length > 0) {
runningContainers[dockerId] = containersArray;
}
}
}
for (const service of services) {
try {
let { fqdn, id, type, destinationDockerId, dualCerts, serviceSetting } = service;
if (!fqdn) {
continue;
}
@@ -392,7 +528,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
runningContainers[destinationDockerId].length === 0 ||
!runningContainers[destinationDockerId].includes(id)
) {
continue;
}
const templates = await getTemplates();
let found = templates.find((a) => a.type === type);
@@ -401,88 +537,144 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
}
found = JSON.parse(JSON.stringify(found).replaceAll('$$id', id));
for (const oneService of Object.keys(found.services)) {
const isDomainAndProxyConfiguration =
found?.services[oneService]?.proxy?.filter((p) => p.port) ?? [];
if (isDomainAndProxyConfiguration.length > 0) {
const template: any = await parseAndFindServiceTemplates(service, null, true);
const { proxy } = template.services[oneService] || found.services[oneService];
for (let configuration of proxy) {
if (configuration.domain) {
const setting = serviceSetting.find(
(a) => a.variableName === configuration.domain
);
if (setting) {
configuration.domain = configuration.domain.replace(
configuration.domain,
setting.value
);
}
}
const foundPortVariable = serviceSetting.find(
(a) => a.name.toLowerCase() === 'port'
);
if (foundPortVariable) {
configuration.port = foundPortVariable.value;
}
let port, pathPrefix, customDomain;
if (configuration) {
port = configuration?.port;
pathPrefix = configuration?.pathPrefix || '/';
customDomain = configuration?.domain;
}
if (customDomain) {
fqdn = customDomain;
} else {
fqdn = service.fqdn;
}
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const isCustomSSL = false;
const serviceId = `${oneService}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, oneService, port)
};
}
} else {
if (found.services[oneService].ports && found.services[oneService].ports.length > 0) {
for (let [index, port] of found.services[oneService].ports.entries()) {
if (port == 22) continue;
if (index === 0) {
const foundPortVariable = serviceSetting.find(
(a) => a.name.toLowerCase() === 'port'
);
if (foundPortVariable) {
port = foundPortVariable.value;
}
}
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const pathPrefix = '/';
const isCustomSSL = false;
const serviceId = `${oneService}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, id, port)
};
}
}
}
}
} catch (error) {
console.log(error);
}
}
}
if (!remote) {
const { fqdn, dualCerts } = await prisma.setting.findFirst();
if (!fqdn) {
return;
}
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const id = isDev ? 'host.docker.internal' : 'coolify';
const container = isDev ? 'host.docker.internal' : 'coolify';
const port = 3000;
const pathPrefix = '/';
const isCustomSSL = false;
const serviceId = `${id}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, container, port)
};
}
} catch (error) {
console.log(error);
} finally {
if (Object.keys(traefik.http.routers).length === 0) {
traefik.http.routers = null;
@@ -496,9 +688,9 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
export async function otherProxyConfiguration(request: FastifyRequest<TraefikOtherConfiguration>) {
try {
const { id } = request.query;
if (id) {
const { privatePort, publicPort, type, address = id } = request.query;
let traefik = {};
if (publicPort && type && privatePort) {
if (type === 'tcp') {
@@ -559,18 +751,18 @@ export async function otherProxyConfiguration(request: FastifyRequest<TraefikOth
}
}
} else {
throw { status: 500 };
}
}
} else {
throw { status: 500 };
}
return {
...traefik
};
}
throw { status: 500 };
} catch ({ status, message }) {
return errorHandler({ status, message });
}
}
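For context only (not shown in this hunk): when `type === 'tcp'`, a handler like `otherProxyConfiguration` assembles a Traefik dynamic configuration for a TCP proxy. The object below is an illustrative assumption of what the returned shape can look like, not the literal implementation; the entrypoint name, service key, and ports are invented.

// Hypothetical return shape; all names and values are assumptions.
const exampleTcpConfig = {
  tcp: {
    routers: {
      'db-id': {
        entrypoints: ['tcp-54321'], // assumed: one entrypoint per publicPort
        rule: 'HostSNI(`*`)',
        service: 'db-id'
      }
    },
    services: {
      'db-id': {
        loadBalancer: {
          servers: [{ address: 'db-id:5432' }] // assumed: `${address}:${privatePort}`
        }
      }
    }
  }
};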

View File

@@ -4,9 +4,11 @@ import { proxyConfiguration, otherProxyConfiguration } from './handlers';
import { OtherProxyConfiguration } from './types';
const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/main.json', async (request, reply) => proxyConfiguration(request, false));
fastify.get<OnlyId>('/remote/:id', async (request) => proxyConfiguration(request, true));
fastify.get<OtherProxyConfiguration>('/other.json', async (request, reply) =>
otherProxyConfiguration(request)
);
};
export default root;
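For illustration only (not part of the diff): Traefik's HTTP provider can poll the `/main.json` route registered above to pick up its dynamic configuration. The base URL and mount prefix below are assumptions; adjust them to wherever this plugin is actually registered.

// Minimal consumer sketch; the URL is an assumption.
async function fetchDynamicConfig(): Promise<unknown> {
  const response = await fetch('http://coolify:3000/webhooks/traefik/main.json');
  // Expected shape, based on proxyConfiguration above:
  // { http: { routers: { ... } | null, services: { ... } | null } }
  return response.json();
}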

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,9 +0,0 @@
import { parentPort } from 'node:worker_threads';
import process from 'node:process';
console.log('Hello TypeScript!');
// signal to parent that the job is done
if (parentPort) parentPort.postMessage('done');
// eslint-disable-next-line unicorn/no-process-exit
else process.exit(0);

View File

@@ -1,26 +0,0 @@
import Bree from 'bree';
import path from 'path';
import Cabin from 'cabin';
import TSBree from '@breejs/ts-worker';
export const isDev = process.env['NODE_ENV'] === 'development';
Bree.extend(TSBree);
const options: any = {
defaultExtension: 'js',
logger: new Cabin(),
// logger: false,
// workerMessageHandler: async ({ name, message }) => {
// if (name === 'deployApplication' && message?.deploying) {
// if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
// scheduler.workers.get('deployApplication').postMessage('cancel')
// }
// }
// },
// jobs: [{ name: 'deployApplication' }]
jobs: [{ name: 'worker' }]
};
if (isDev) options.root = path.join(__dirname, '../jobs');
export const scheduler = new Bree(options);
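For context on this removed file: a Bree instance is normally started once at boot, and individual jobs can also be triggered by name. A minimal sketch of how such a scheduler is typically driven; the call site is an assumption and is not taken from this diff.

// Sketch only; where Coolify actually invoked this is not shown here.
import { scheduler } from './scheduler';

function bootJobs(): void {
  scheduler.start(); // start all configured jobs ('worker' above)
  scheduler.run('worker'); // or trigger a single job by name
}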

View File

@@ -1,84 +0,0 @@
import { z } from 'zod';
import { privateProcedure, router } from '../trpc';
import { decrypt } from '../../lib/common';
import { prisma } from '../../prisma';
import { executeCommand } from '../../lib/executeCommand';
import { stopDatabaseContainer, stopTcpHttpProxy } from '../../lib/docker';
export const databasesRouter = router({
status: privateProcedure.input(z.object({ id: z.string() })).query(async ({ ctx, input }) => {
const id = input.id;
const teamId = ctx.user?.teamId;
let isRunning = false;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database) {
const { destinationDockerId, destinationDocker } = database;
if (destinationDockerId) {
try {
const { stdout } = await executeCommand({
dockerId: destinationDocker.id,
command: `docker inspect --format '{{json .State}}' ${id}`
});
if (JSON.parse(stdout).Running) {
isRunning = true;
}
} catch (error) {
//
}
}
}
return {
isRunning
};
}),
cleanup: privateProcedure.query(async ({ ctx }) => {
const teamId = ctx.user?.teamId;
let databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }
});
for (const database of databases) {
if (!database?.version) {
const { id } = database;
if (database.destinationDockerId) {
const everStarted = await stopDatabaseContainer(database);
if (everStarted)
await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
}
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
}
}
return {};
}),
delete: privateProcedure
.input(z.object({ id: z.string(), force: z.boolean() }))
.mutation(async ({ ctx, input }) => {
const { id, force } = input;
const teamId = ctx.user?.teamId;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (!force) {
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword)
database.rootUserPassword = decrypt(database.rootUserPassword);
if (database.destinationDockerId) {
const everStarted = await stopDatabaseContainer(database);
if (everStarted)
await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
}
}
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
return {};
})
});
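For illustration only (not part of the diff): a tRPC router like the removed `databasesRouter` can be exercised server-side through `createCaller`. The import path and context shape below are assumptions.

// Sketch only; the real context object comes from ../trpc and may differ.
import { databasesRouter } from './databases';

async function isDatabaseRunning(id: string): Promise<boolean> {
  const caller = databasesRouter.createCaller({ user: { teamId: '0' } } as any);
  const { isRunning } = await caller.status({ id });
  return isRunning;
}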

View File

@@ -1,171 +0,0 @@
import { z } from 'zod';
import { privateProcedure, router } from '../trpc';
import { decrypt, getTemplates, removeService } from '../../lib/common';
import { prisma } from '../../prisma';
import { executeCommand } from '../../lib/executeCommand';
export const servicesRouter = router({
status: privateProcedure.input(z.object({ id: z.string() })).query(async ({ ctx, input }) => {
const id = input.id;
const teamId = ctx.user?.teamId;
if (!teamId) {
throw { status: 400, message: 'Team not found.' };
}
const service = await getServiceFromDB({ id, teamId });
const { destinationDockerId } = service;
let payload = {};
if (destinationDockerId) {
const { stdout: containers } = await executeCommand({
dockerId: service.destinationDocker.id,
command: `docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'`
});
if (containers) {
const containersArray = containers.trim().split('\n');
if (containersArray.length > 0 && containersArray[0] !== '') {
const templates = await getTemplates();
let template = templates.find((t: { type: string }) => t.type === service.type);
const templateStr = JSON.stringify(template);
if (templateStr) {
template = JSON.parse(templateStr.replaceAll('$$id', service.id));
}
for (const container of containersArray) {
let isRunning = false;
let isExited = false;
let isRestarting = false;
let isExcluded = false;
const containerObj = JSON.parse(container);
const exclude = template?.services[containerObj.Names]?.exclude;
if (exclude) {
payload[containerObj.Names] = {
status: {
isExcluded: true,
isRunning: false,
isExited: false,
isRestarting: false
}
};
continue;
}
const status = containerObj.State;
if (status === 'running') {
isRunning = true;
}
if (status === 'exited') {
isExited = true;
}
if (status === 'restarting') {
isRestarting = true;
}
payload[containerObj.Names] = {
status: {
isExcluded,
isRunning,
isExited,
isRestarting
}
};
}
}
}
}
return payload;
}),
cleanup: privateProcedure.query(async ({ ctx }) => {
const teamId = ctx.user?.teamId;
let services = await prisma.service.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, teams: true }
});
for (const service of services) {
if (!service.fqdn) {
if (service.destinationDockerId) {
const { stdout: containers } = await executeCommand({
dockerId: service.destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}`
});
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({
dockerId: service.destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: service.destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
}
await removeService({ id: service.id });
}
}
}),
delete: privateProcedure
.input(z.object({ force: z.boolean(), id: z.string() }))
.mutation(async ({ input }) => {
// todo: check if user is allowed to delete service
const { id } = input;
await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
await prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
await prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
await prisma.fider.deleteMany({ where: { serviceId: id } });
await prisma.ghost.deleteMany({ where: { serviceId: id } });
await prisma.umami.deleteMany({ where: { serviceId: id } });
await prisma.hasura.deleteMany({ where: { serviceId: id } });
await prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
await prisma.minio.deleteMany({ where: { serviceId: id } });
await prisma.vscodeserver.deleteMany({ where: { serviceId: id } });
await prisma.wordpress.deleteMany({ where: { serviceId: id } });
await prisma.glitchTip.deleteMany({ where: { serviceId: id } });
await prisma.moodle.deleteMany({ where: { serviceId: id } });
await prisma.appwrite.deleteMany({ where: { serviceId: id } });
await prisma.searxng.deleteMany({ where: { serviceId: id } });
await prisma.weblate.deleteMany({ where: { serviceId: id } });
await prisma.taiga.deleteMany({ where: { serviceId: id } });
await prisma.service.delete({ where: { id } });
return {};
})
});
export async function getServiceFromDB({
id,
teamId
}: {
id: string;
teamId: string;
}): Promise<any> {
const settings = await prisma.setting.findFirst();
const body = await prisma.service.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: {
destinationDocker: true,
persistentStorage: true,
serviceSecret: true,
serviceSetting: true,
wordpress: true,
plausibleAnalytics: true
}
});
if (!body) {
return null;
}
// body.type = fixType(body.type);
if (body?.serviceSecret.length > 0) {
body.serviceSecret = body.serviceSecret.map((s) => {
s.value = decrypt(s.value);
return s;
});
}
if (body.wordpress) {
body.wordpress.ftpPassword = decrypt(body.wordpress.ftpPassword);
}
return { ...body, settings };
}

View File

@@ -3,6 +3,15 @@ import { addToast } from './store';
import Cookies from 'js-cookie';
export const asyncSleep = (delay: number) => new Promise((resolve) => setTimeout(resolve, delay));
export function dashify(str: string, options?: any): string {
if (typeof str !== 'string') return str;
return str
.trim()
.replace(/\W/g, (m) => (/[À-ž]/.test(m) ? m : '-'))
.replace(/^-+|-+$/g, '')
.replace(/-{2,}/g, (m) => (options && options.condense ? '-' : m))
.toLowerCase();
}
export function errorNotification(error: any | { message: string }): void {
if (error instanceof Error) {
console.error(error.message)
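For illustration only (not part of the diff): the new `dashify` helper slugifies arbitrary strings. Expected results, following the logic above:

// Expected behaviour of dashify as defined above.
dashify('Hello World'); // 'hello-world'
dashify('  Fancy   Name  '); // 'fancy---name' (dash runs are kept by default)
dashify('  Fancy   Name  ', { condense: true }); // 'fancy-name'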

View File

@@ -0,0 +1,44 @@
<script lang="ts">
import ExternalLink from './ExternalLink.svelte';
import Tooltip from './Tooltip.svelte';
export let url = 'https://docs.coollabs.io';
export let text: any = '';
export let isExternal = false;
let id =
'cool-' +
url
.split('')
.map((c) => c.charCodeAt(0).toString(16).padStart(2, '0'))
.join('')
.slice(-16);
</script>
<a
{id}
href={url}
target="_blank noreferrer"
class="flex no-underline inline-block cursor-pointer"
class:icons={!text}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-6 h-6"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M9.879 7.519c1.171-1.025 3.071-1.025 4.242 0 1.172 1.025 1.172 2.687 0 3.712-.203.179-.43.326-.67.442-.745.361-1.45.999-1.45 1.827v.75M21 12a9 9 0 11-18 0 9 9 0 0118 0zm-9 5.25h.008v.008H12v-.008z"
/>
</svg>
{text}
{#if isExternal}
<ExternalLink />
{/if}
</a>
{#if !text}
<Tooltip triggeredBy={`#${id}`}>See details in the documentation</Tooltip>
{/if}

View File

@@ -0,0 +1,10 @@
<svg
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
viewBox="0 0 24 24"
stroke-width="3"
stroke="currentColor"
class="w-3 h-3 text-white"
>
<path stroke-linecap="round" stroke-linejoin="round" d="M4.5 19.5l15-15m0 0H8.25m11.25 0v11.25" />
</svg>

(new SVG icon shown above; rendered size: 261 B)

View File

@@ -0,0 +1,6 @@
<script lang="ts">
export let text: string;
export let customClass = 'max-w-[24rem]';
</script>
<div class="p-2 text-xs text-stone-400 {customClass}">{@html text}</div>

View File

(binary image changed: 486 B before, 486 B after)

View File

(binary image changed: 262 B before, 262 B after)

Some files were not shown because too many files have changed in this diff.