Mirror of https://github.com/ershisan99/coolify.git (synced 2026-01-04 12:33:47 +00:00)

Compare commits: 113 commits
| SHA1 |
|---|
| 6311627899 |
| 37cea5fb61 |
| 655a8cd60d |
| 4c8babc96a |
| 612bacebed |
| ade7c8566d |
| 19553ce5c8 |
| 18ed2527e8 |
| b0652bc884 |
| 15c9ad23fe |
| 578bb12562 |
| f82cfda07f |
| 9e52b2788d |
| 2e56a113d9 |
| 4722d777e6 |
| 2141d54ae0 |
| e346225136 |
| 012d4dae56 |
| b4d9fe70af |
| 85e83b5441 |
| 6b2a453b8f |
| 27021538d8 |
| 8b57a2b055 |
| 75dd894685 |
| 9101ef8774 |
| 5932540630 |
| ec376b2e47 |
| a176562ad0 |
| becf37b676 |
| 9b5efab8f8 |
| e98a8ba599 |
| 7ddac50008 |
| 9837ae359f |
| 710a829dcb |
| ccd84fa454 |
| 335b36d3a9 |
| 2be30fae00 |
| db5cd21884 |
| bfd3020031 |
| 344c36997a |
| dfd9272b70 |
| 359f4520f5 |
| aecf014f4e |
| d2a89ddf84 |
| c01fe153ae |
| 4f4a838799 |
| ac6f2567eb |
| 05a5816ac6 |
| 9c8f6e9195 |
| 2fd001f6d2 |
| d641d32413 |
| 18064ef6a2 |
| 5cb9216add |
| 91c36dc810 |
| 6efb02fa32 |
| 97313e4180 |
| 568ab24fd9 |
| 5a745efcd3 |
| c651570e62 |
| 8980598085 |
| c07c742feb |
| 1053abb9a9 |
| 2c9e57cbb1 |
| c6eaa2c8a6 |
| 5ab5e913ee |
| cea53ca476 |
| 58af09114b |
| c4c0417e2d |
| 74f90e6947 |
| ad5c339780 |
| 305823db00 |
| baf58b298f |
| c37367d018 |
| 1c98796e64 |
| e686d9a6ea |
| a1936b9d59 |
| 834f9c9337 |
| 615f8cfd3b |
| 8ed134105f |
| 5d6169b270 |
| e83de8b938 |
| ee55e039b2 |
| 086dd89144 |
| 68e5d4dd2c |
| 55a35c6bec |
| d09b4885fe |
| a46773e6d8 |
| a422d0220c |
| e5eba8430a |
| 3d235dc316 |
| 80d3b4be8c |
| fe8b7480df |
| cebfc3aaa0 |
| f778b5a12d |
| 2244050160 |
| 9284e42b62 |
| ee40120496 |
| 30cd2149ea |
| 395df36d57 |
| 79597ea0e5 |
| 283f39270a |
| 7d892bb19d |
| a025f124f3 |
| 84f7287bf8 |
| 2391850218 |
| af548e6ef8 |
| ed24a9c990 |
| 0d51b04d79 |
| 379b1de64f |
| f3ff324925 |
| 0f2160222f |
| ce3750c51c |
| 72a7ea6e91 |
.github/workflows/pocketbase-release.yml (vendored, 14 lines changed)
@@ -5,7 +5,9 @@ on:
paths:
- "others/pocketbase/*"
- ".github/workflows/pocketbase-release.yml"
branches:
- next
- main
jobs:
arm64:
runs-on: [self-hosted, arm64]
@@ -27,7 +29,7 @@
context: others/pocketbase/
platforms: linux/arm64
push: true
tags: coollabsio/pocketbase:0.10.2-arm64
tags: coollabsio/pocketbase:0.11.0-arm64
amd64:
runs-on: ubuntu-latest
steps:
@@ -48,7 +50,7 @@
context: others/pocketbase/
platforms: linux/amd64
push: true
tags: coollabsio/pocketbase:0.10.2-amd64
tags: coollabsio/pocketbase:0.11.0-amd64
aarch64:
runs-on: [self-hosted, arm64]
steps:
@@ -69,7 +71,7 @@
context: others/pocketbase/
platforms: linux/aarch64
push: true
tags: coollabsio/pocketbase:0.10.2-aarch64
tags: coollabsio/pocketbase:0.11.0-aarch64
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64]
@@ -87,5 +89,5 @@
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/pocketbase:0.10.2 --amend coollabsio/pocketbase:0.10.2-amd64 --amend coollabsio/pocketbase:0.10.2-arm64 --amend coollabsio/pocketbase:0.10.2-aarch64
docker manifest push coollabsio/pocketbase:0.10.2
docker manifest create coollabsio/pocketbase:0.11.0 --amend coollabsio/pocketbase:0.11.0-amd64 --amend coollabsio/pocketbase:0.11.0-arm64 --amend coollabsio/pocketbase:0.11.0-aarch64
docker manifest push coollabsio/pocketbase:0.11.0
.github/workflows/production-release.yml (vendored, 10 lines changed)
@@ -54,7 +54,7 @@ jobs:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}
cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
aarch64:
@@ -103,10 +103,10 @@
id: package-version
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
docker manifest create coollabsio/coolify:latest --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-amd64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --amend coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
docker manifest push coollabsio/coolify:${{steps.package-version.outputs.current-version}}
docker manifest push coollabsio/coolify:latest
docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}}
docker tag coollabsio/coolify:${{steps.package-version.outputs.current-version}} coollabsio/coolify:latest
docker push coollabsio/coolify:latest
docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:latest
- uses: sarisia/actions-status-discord@v1
if: always()
with:
.github/workflows/staging-release.yml (vendored, 5 lines changed)
@@ -65,7 +65,7 @@ jobs:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:next-amd64
tags: coollabsio/coolify:next
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
merge-manifest:
@@ -85,8 +85,7 @@
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify:next --amend coollabsio/coolify:next-amd64 --amend coollabsio/coolify:next-arm64
docker manifest push coollabsio/coolify:next
docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
- uses: sarisia/actions-status-discord@v1
if: always()
with:
.gitignore (vendored, 1 line changed)
@@ -12,6 +12,7 @@ dist
apps/api/db/*.db
apps/api/db/migration.db-journal
apps/api/core*
apps/server/build
apps/backup/backups/*
!apps/backup/backups/.gitkeep
/logs
apps/api/.gitignore (vendored, 4 lines changed)
@@ -8,4 +8,6 @@ package
!.env.example
dist
dev.db
client
client
testTemplate.yaml
testTags.json
File diff suppressed because one or more lines are too long
@@ -1,5 +1,229 @@
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: "0.10.2"
|
||||
defaultVersion: "9.22"
|
||||
documentation: https://docs.directus.io/getting-started/introduction.html
|
||||
type: directus-postgresql
|
||||
name: Directus
|
||||
subname: (PostgreSQL)
|
||||
description: >-
|
||||
Directus is a free and open-source headless CMS framework for managing custom SQL-based databases.
|
||||
labels:
|
||||
- CMS
|
||||
- headless
|
||||
services:
|
||||
$$id:
|
||||
name: Directus
|
||||
depends_on:
|
||||
- $$id-postgresql
|
||||
- $$id-redis
|
||||
image: directus/directus:$$core_version
|
||||
volumes:
|
||||
- $$id-uploads:/directus/uploads
|
||||
- $$id-database:/directus/database
|
||||
- $$id-extensions:/directus/extensions
|
||||
environment:
|
||||
- KEY=$$secret_key
|
||||
- SECRET=$$secret_secret
|
||||
- DB_CLIENT=pg
|
||||
- DB_CONNECTION_STRING=$$secret_db_connection_string
|
||||
- CACHE_ENABLED=true
|
||||
- CACHE_STORE=redis
|
||||
- CACHE_REDIS=$$secret_cache_redis
|
||||
- ADMIN_EMAIL=$$config_admin_email
|
||||
- ADMIN_PASSWORD=$$secret_admin_password
|
||||
- CACHE_AUTO_PURGE=true
|
||||
- PUBLIC_URL=$$config_public_url
|
||||
ports:
|
||||
- "8055"
|
||||
$$id-postgresql:
|
||||
name: Directus PostgreSQL
|
||||
depends_on: []
|
||||
image: postgres:14-alpine
|
||||
volumes:
|
||||
- $$id-postgresql-data:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_USER=$$config_postgres_user
|
||||
- POSTGRES_PASSWORD=$$secret_postgres_password
|
||||
- POSTGRES_DB=$$config_postgres_db
|
||||
ports: []
|
||||
$$id-redis:
|
||||
name: Directus Redis
|
||||
depends_on: []
|
||||
image: redis:7.0.4-alpine
|
||||
command: "--maxmemory 512mb --maxmemory-policy allkeys-lru --maxmemory-samples 5"
|
||||
volumes:
|
||||
- "$$id-redis:/data"
|
||||
environment: []
|
||||
variables:
|
||||
- id: $$config_public_url
|
||||
name: PUBLIC_URL
|
||||
label: Public URL
|
||||
defaultValue: $$generate_fqdn
|
||||
description: ""
|
||||
- id: $$secret_db_connection_string
|
||||
name: DB_CONNECTION_STRING
|
||||
label: Directus Database Url
|
||||
defaultValue: postgresql://$$config_postgres_user:$$secret_postgres_password@$$id-postgresql:5432/$$config_postgres_db
|
||||
description: ""
|
||||
- id: $$config_postgres_db
|
||||
main: $$id-postgresql
|
||||
name: POSTGRES_DB
|
||||
label: Database
|
||||
defaultValue: directus
|
||||
description: ""
|
||||
- id: $$config_postgres_user
|
||||
main: $$id-postgresql
|
||||
name: POSTGRES_USER
|
||||
label: User
|
||||
defaultValue: $$generate_username
|
||||
description: ""
|
||||
- id: $$secret_postgres_password
|
||||
main: $$id-postgresql
|
||||
name: POSTGRES_PASSWORD
|
||||
label: Password
|
||||
defaultValue: $$generate_password
|
||||
description: ""
|
||||
showOnConfiguration: true
|
||||
- id: $$secret_cache_redis
|
||||
name: CACHE_REDIS
|
||||
label: Redis Url
|
||||
defaultValue: redis://$$id-redis:6379
|
||||
description: ""
|
||||
- id: $$config_admin_email
|
||||
name: ADMIN_EMAIL
|
||||
label: Initial Admin Email
|
||||
defaultValue: "admin@example.com"
|
||||
description: "The email address of the first user that is automatically created. You can change it later in Directus."
|
||||
- id: $$secret_admin_password
|
||||
name: ADMIN_PASSWORD
|
||||
label: Initial Admin Password
|
||||
defaultValue: $$generate_password
|
||||
description: "The password of the first user that is automatically created."
|
||||
showOnConfiguration: true
|
||||
- id: $$secret_key
|
||||
name: KEY
|
||||
label: Key
|
||||
defaultValue: $$generate_password
|
||||
description: "Unique identifier for the project."
|
||||
showOnConfiguration: true
|
||||
- id: $$secret_secret
|
||||
name: SECRET
|
||||
label: Secret
|
||||
defaultValue: $$generate_password
|
||||
description: "Secret string for the project."
|
||||
showOnConfiguration: true
|
||||
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: v1.3.8
|
||||
documentation: https://github.com/LibreTranslate/LibreTranslate
|
||||
description: Free and Open Source Machine Translation API. 100% self-hosted, offline capable and easy to setup.
|
||||
type: libretranslate
|
||||
name: Libretranslate
|
||||
labels:
|
||||
- translator
|
||||
- argos
|
||||
- python
|
||||
- libretranslate
|
||||
services:
|
||||
$$id:
|
||||
name: Libretranslate
|
||||
image: libretranslate/libretranslate:$$core_version
|
||||
environment:
|
||||
- LT_HOST=0.0.0.0
|
||||
- LT_SUGGESTIONS=true
|
||||
- LT_CHAR_LIMIT=$$config_lt_char_limit
|
||||
- LT_REQ_LIMIT=$$config_lt_req_limit
|
||||
- LT_BATCH_LIMIT=$$config_lt_batch_limit
|
||||
- LT_GA_ID=$$config_lt_ga_id
|
||||
- LT_DISABLE_WEB_UI=$$config_lt_web_ui
|
||||
volumes:
|
||||
- $$id-libretranslate:/libretranslate
|
||||
ports:
|
||||
- "5000"
|
||||
variables:
|
||||
- id: $$config_lt_char_limit
|
||||
name: LT_CHAR_LIMIT
|
||||
label: Char limit
|
||||
defaultValue: "5000"
|
||||
description: "Set character limit."
|
||||
- id: $$config_lt_req_limit
|
||||
name: LT_REQ_LIMIT
|
||||
label: Request limit
|
||||
defaultValue: "5000"
|
||||
description: "Set maximum number of requests per minute per client."
|
||||
- id: $$config_lt_batch_limit
|
||||
name: LT_BATCH_LIMIT
|
||||
label: Batch Limit
|
||||
defaultValue: "5000"
|
||||
description: "Set maximum number of texts to translate in a batch request."
|
||||
- id: $$config_lt_ga_id
|
||||
name: LT_GA_ID
|
||||
label: Google Analytics ID
|
||||
defaultValue: ""
|
||||
description: "Enable Google Analytics on the API client page by providing an ID"
|
||||
- id: $$config_lt_web_ui
|
||||
name: LT_DISABLE_WEB_UI
|
||||
label: Web UI
|
||||
defaultValue: "false"
|
||||
description: "Disable or enable web ui. True or false."
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 0.8.0
|
||||
documentation: https://github.com/benbusby/whoogle-search
|
||||
type: whoogle
|
||||
name: Whoogle Search
|
||||
description: A self-hosted, ad-free, privacy-respecting metasearch engine
|
||||
labels:
|
||||
- search
|
||||
- google
|
||||
services:
|
||||
$$id:
|
||||
name: Whoogle Search
|
||||
documentation: https://github.com/benbusby/whoogle-search
|
||||
depends_on: []
|
||||
image: benbusby/whoogle-search:$$core_version
|
||||
cap_drop:
|
||||
- ALL
|
||||
environment:
|
||||
- WHOOGLE_USER=$$config_whoogle_username
|
||||
- WHOOGLE_PASS=$$secret_whoogle_password
|
||||
- WHOOGLE_CONFIG_PREFERENCES_KEY=$$secret_whoogle_preferences_key
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 262144
|
||||
hard: 262144
|
||||
ports:
|
||||
- "5000"
|
||||
variables:
|
||||
- id: $$config_whoogle_username
|
||||
name: WHOOGLE_USER
|
||||
label: Whoogle User
|
||||
defaultValue: $$generate_username
|
||||
description: "Username to log into Whoogle"
|
||||
- id: $$secret_whoogle_password
|
||||
name: WHOOGLE_PASSWORD
|
||||
label: Whoogle Password
|
||||
defaultValue: $$generate_password
|
||||
description: "Password to log into Whoogle"
|
||||
showOnConfiguration: true
|
||||
- id: $$secret_whoogle_preferences_key
|
||||
name: WHOOGLE_CONFIG_PREFERENCES_KEY
|
||||
label: Whoogle preferences key
|
||||
defaultValue: $$generate_password
|
||||
description: "password to encrypt preferences"
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 1.1.3
|
||||
documentation: https://docs.openblocks.dev/
|
||||
type: openblocks
|
||||
name: Openblocks
|
||||
description: The Open Source Retool Alternative
|
||||
services:
|
||||
$$id:
|
||||
image: openblocksdev/openblocks-ce:$$core_version
|
||||
volumes:
|
||||
- $$id-stacks-data:/openblocks-stacks
|
||||
ports:
|
||||
- "3000"
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: "0.11.0"
|
||||
documentation: https://pocketbase.io/docs/
|
||||
type: pocketbase
|
||||
name: Pocketbase
|
||||
@@ -124,12 +348,12 @@
|
||||
description: ""
|
||||
- id: $$config_disable_auth
|
||||
name: DISABLE_AUTH
|
||||
label: Disable Authentication
|
||||
label: Authentication
|
||||
defaultValue: "false"
|
||||
description: ""
|
||||
- id: $$config_disable_registration
|
||||
name: DISABLE_REGISTRATION
|
||||
label: Disable Registration
|
||||
label: Registration
|
||||
defaultValue: "true"
|
||||
description: ""
|
||||
- id: $$config_postgres_user
|
||||
@@ -157,7 +381,7 @@
|
||||
defaultValue: plausible.js
|
||||
description: This is the default script name.
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: "1.17"
|
||||
defaultVersion: "1.18"
|
||||
documentation: https://docs.gitea.io
|
||||
type: gitea
|
||||
name: Gitea
|
||||
@@ -332,12 +556,12 @@
|
||||
volumes:
|
||||
- $$id-lavalink:/lavalink
|
||||
ports:
|
||||
- "2333"
|
||||
- $$config_port
|
||||
files:
|
||||
- location: /opt/Lavalink/application.yml
|
||||
content: >-
|
||||
server:
|
||||
port: $$config_port
|
||||
port: 2333
|
||||
address: 0.0.0.0
|
||||
lavalink:
|
||||
server:
|
||||
@@ -364,18 +588,13 @@
|
||||
max-file-size: 1GB
|
||||
max-history: 30
|
||||
variables:
|
||||
- id: $$config_port
|
||||
name: PORT
|
||||
label: Port
|
||||
defaultValue: "2333"
|
||||
required: true
|
||||
- id: $$secret_password
|
||||
name: PASSWORD
|
||||
label: Password
|
||||
defaultValue: $$generate_password
|
||||
required: true
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: v1.8.9
|
||||
defaultVersion: v1.9.3
|
||||
documentation: https://docs.appsmith.com/getting-started/setup/instance-configuration/
|
||||
type: appsmith
|
||||
name: Appsmith
|
||||
@@ -408,7 +627,7 @@
|
||||
defaultValue: "true"
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 0.57.4
|
||||
defaultVersion: 0.58.4
|
||||
documentation: https://hub.docker.com/r/zadam/trilium
|
||||
description: "A hierarchical note taking application with focus on building large personal knowledge bases."
|
||||
labels:
|
||||
@@ -428,7 +647,7 @@
|
||||
- "8080"
|
||||
variables: []
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 1.18.5
|
||||
defaultVersion: 1.19.4
|
||||
documentation: https://hub.docker.com/r/louislam/uptime-kuma
|
||||
description: A free & fancy self-hosted monitoring tool.
|
||||
labels:
|
||||
@@ -445,7 +664,7 @@
|
||||
- "3001"
|
||||
variables: []
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: "5.8"
|
||||
defaultVersion: "6.0"
|
||||
documentation: https://hub.docker.com/r/silviof/docker-languagetool
|
||||
description: "A multilingual grammar, style and spell checker."
|
||||
type: languagetool
|
||||
@@ -460,7 +679,7 @@
|
||||
- "8010"
|
||||
variables: []
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 1.26.0
|
||||
defaultVersion: 1.27.0
|
||||
documentation: https://hub.docker.com/r/vaultwarden/server
|
||||
description: "Bitwarden compatible server written in Rust."
|
||||
type: vaultwarden
|
||||
@@ -478,7 +697,7 @@
|
||||
- "80"
|
||||
variables: []
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 9.3.1
|
||||
defaultVersion: 9.3.2
|
||||
documentation: https://hub.docker.com/r/grafana/grafana
|
||||
type: grafana
|
||||
name: Grafana
|
||||
@@ -499,7 +718,7 @@
|
||||
- "3000"
|
||||
variables: []
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 1.1.2
|
||||
defaultVersion: 1.2.0
|
||||
documentation: https://appwrite.io/docs
|
||||
type: appwrite
|
||||
name: Appwrite
|
||||
@@ -1669,7 +1888,7 @@
|
||||
defaultValue: weblate
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 2022.12.12-966e9c3c
|
||||
defaultVersion: 2023.01.15-52d41559
|
||||
documentation: https://docs.searxng.org/
|
||||
type: searxng
|
||||
name: SearXNG
|
||||
@@ -1742,7 +1961,7 @@
|
||||
defaultValue: $$generate_password
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: v3.0.0
|
||||
defaultVersion: v3.0.2
|
||||
documentation: https://glitchtip.com/documentation
|
||||
type: glitchtip
|
||||
name: GlitchTip
|
||||
@@ -1964,7 +2183,7 @@
|
||||
defaultValue: glitchtip
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: v2.16.0
|
||||
defaultVersion: v2.16.1
|
||||
documentation: https://hasura.io/docs/latest/index/
|
||||
type: hasura
|
||||
name: Hasura
|
||||
@@ -2444,7 +2663,7 @@
|
||||
description: ""
|
||||
showOnConfiguration: true
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: v0.30.1
|
||||
defaultVersion: v0.30.5
|
||||
documentation: https://docs.meilisearch.com/learn/getting_started/quick_start.html
|
||||
type: meilisearch
|
||||
name: MeiliSearch
|
||||
@@ -2474,7 +2693,7 @@
|
||||
showOnConfiguration: true
|
||||
- templateVersion: 1.0.0
|
||||
ignore: true
|
||||
defaultVersion: latest
|
||||
defaultVersion: 5.30.0
|
||||
documentation: https://docs.ghost.org
|
||||
arch: amd64
|
||||
type: ghost-mariadb
|
||||
@@ -2592,7 +2811,7 @@
|
||||
defaultValue: $$generate_password
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: "5.25.3"
|
||||
defaultVersion: 5.30.0
|
||||
documentation: https://docs.ghost.org
|
||||
type: ghost-only
|
||||
name: Ghost
|
||||
@@ -2656,7 +2875,7 @@
|
||||
placeholder: "ghost_db"
|
||||
required: true
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: "5.25.3"
|
||||
defaultVersion: 5.30.0
|
||||
documentation: https://docs.ghost.org
|
||||
type: ghost-mysql
|
||||
name: Ghost
|
||||
@@ -2733,7 +2952,7 @@
|
||||
defaultValue: $$generate_password
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: php8.1
|
||||
defaultVersion: php8.2
|
||||
documentation: https://wordpress.org/
|
||||
type: wordpress
|
||||
name: WordPress
|
||||
@@ -2823,7 +3042,7 @@
|
||||
description: ""
|
||||
readOnly: true
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: php8.1
|
||||
defaultVersion: php8.2
|
||||
documentation: https://wordpress.org/
|
||||
type: wordpress-only
|
||||
name: WordPress
|
||||
@@ -2897,7 +3116,7 @@
|
||||
define('WP_DEBUG_DISPLAY', false);
|
||||
@ini_set('display_errors', 0);
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 4.9.0
|
||||
defaultVersion: 4.9.1
|
||||
documentation: https://coder.com/docs/coder-oss/latest
|
||||
type: vscodeserver
|
||||
name: VSCode Server
|
||||
@@ -2912,7 +3131,6 @@
|
||||
depends_on: []
|
||||
image: "codercom/code-server:$$core_version"
|
||||
volumes:
|
||||
- "$$id-config-data:/home/coder/.local/share/code-server"
|
||||
- "$$id-vscodeserver-data:/home/coder"
|
||||
- "$$id-keys-directory:/root/.ssh"
|
||||
- "$$id-theme-and-plugin-directory:/root/.local/share/code-server"
|
||||
@@ -2928,7 +3146,7 @@
|
||||
description: ""
|
||||
showOnConfiguration: true
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: RELEASE.2022-12-12T19-27-27Z
|
||||
defaultVersion: RELEASE.2023-01-12T02-06-16Z
|
||||
documentation: https://min.io/docs/minio
|
||||
type: minio
|
||||
name: MinIO
|
||||
@@ -2987,7 +3205,7 @@
|
||||
description: ""
|
||||
showOnConfiguration: true
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 0.21.1
|
||||
defaultVersion: stable
|
||||
documentation: https://fider.io/docs
|
||||
type: fider
|
||||
name: Fider
|
||||
@@ -3106,7 +3324,7 @@
|
||||
defaultValue: $$generate_username
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 0.207.0
|
||||
defaultVersion: 0.210.1
|
||||
documentation: https://docs.n8n.io
|
||||
type: n8n
|
||||
name: n8n.io
|
||||
@@ -3137,7 +3355,7 @@
|
||||
defaultValue: $$generate_fqdn
|
||||
description: ""
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: stable
|
||||
defaultVersion: v1.5.1
|
||||
documentation: https://plausible.io/doc/
|
||||
arch: amd64
|
||||
type: plausibleanalytics
|
||||
@@ -3250,12 +3468,12 @@
|
||||
description: ""
|
||||
- id: $$config_disable_auth
|
||||
name: DISABLE_AUTH
|
||||
label: Disable Authentication
|
||||
label: Authentication
|
||||
defaultValue: "false"
|
||||
description: ""
|
||||
- id: $$config_disable_registration
|
||||
name: DISABLE_REGISTRATION
|
||||
label: Disable Registration
|
||||
label: Registration
|
||||
defaultValue: "true"
|
||||
description: ""
|
||||
- id: $$config_postgresql_username
|
||||
@@ -3283,7 +3501,7 @@
|
||||
defaultValue: plausible.js
|
||||
description: This is the default script name.
|
||||
- templateVersion: 1.0.0
|
||||
defaultVersion: 0.99.1
|
||||
defaultVersion: 0.101.2
|
||||
documentation: https://docs.nocodb.com
|
||||
type: nocodb
|
||||
name: NocoDB
|
||||
|
||||
@@ -16,31 +16,31 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@breejs/ts-worker": "2.0.0",
|
||||
"@fastify/autoload": "5.5.0",
|
||||
"@fastify/autoload": "5.7.0",
|
||||
"@fastify/cookie": "8.3.0",
|
||||
"@fastify/cors": "8.2.0",
|
||||
"@fastify/env": "4.1.0",
|
||||
"@fastify/jwt": "6.3.3",
|
||||
"@fastify/env": "4.2.0",
|
||||
"@fastify/jwt": "6.5.0",
|
||||
"@fastify/multipart": "7.3.0",
|
||||
"@fastify/static": "6.5.1",
|
||||
"@fastify/static": "6.6.0",
|
||||
"@iarna/toml": "2.2.5",
|
||||
"@ladjs/graceful": "3.0.2",
|
||||
"@prisma/client": "4.6.1",
|
||||
"@sentry/node": "7.21.1",
|
||||
"@sentry/tracing": "7.21.1",
|
||||
"axe": "11.0.0",
|
||||
"@ladjs/graceful": "3.2.1",
|
||||
"@prisma/client": "4.8.1",
|
||||
"@sentry/node": "7.30.0",
|
||||
"@sentry/tracing": "7.30.0",
|
||||
"axe": "11.2.1",
|
||||
"bcryptjs": "2.4.3",
|
||||
"bree": "9.1.2",
|
||||
"cabin": "11.0.1",
|
||||
"bree": "9.1.3",
|
||||
"cabin": "11.1.1",
|
||||
"compare-versions": "5.0.1",
|
||||
"csv-parse": "5.3.2",
|
||||
"csv-parse": "5.3.3",
|
||||
"csvtojson": "2.0.10",
|
||||
"cuid": "2.1.8",
|
||||
"dayjs": "1.11.6",
|
||||
"dayjs": "1.11.7",
|
||||
"dockerode": "3.3.4",
|
||||
"dotenv-extended": "2.9.0",
|
||||
"execa": "6.1.0",
|
||||
"fastify": "4.10.2",
|
||||
"fastify": "4.11.0",
|
||||
"fastify-plugin": "4.3.0",
|
||||
"fastify-socket.io": "4.0.0",
|
||||
"generate-password": "1.7.0",
|
||||
@@ -48,36 +48,36 @@
|
||||
"is-ip": "5.0.0",
|
||||
"is-port-reachable": "4.0.0",
|
||||
"js-yaml": "4.1.0",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"jsonwebtoken": "9.0.0",
|
||||
"minimist": "^1.2.7",
|
||||
"node-forge": "1.3.1",
|
||||
"node-os-utils": "1.3.7",
|
||||
"p-all": "4.0.0",
|
||||
"p-throttle": "5.0.0",
|
||||
"prisma": "4.6.1",
|
||||
"prisma": "4.8.1",
|
||||
"public-ip": "6.0.1",
|
||||
"pump": "3.0.0",
|
||||
"shell-quote": "^1.7.4",
|
||||
"socket.io": "4.5.3",
|
||||
"ssh-config": "4.1.6",
|
||||
"socket.io": "4.5.4",
|
||||
"ssh-config": "4.2.0",
|
||||
"strip-ansi": "7.0.1",
|
||||
"unique-names-generator": "4.7.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "18.11.9",
|
||||
"@types/node": "18.11.18",
|
||||
"@types/node-os-utils": "1.3.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.44.0",
|
||||
"@typescript-eslint/parser": "5.44.0",
|
||||
"esbuild": "0.15.15",
|
||||
"eslint": "8.28.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.48.1",
|
||||
"@typescript-eslint/parser": "5.48.1",
|
||||
"esbuild": "0.16.16",
|
||||
"eslint": "8.31.0",
|
||||
"eslint-config-prettier": "8.6.0",
|
||||
"eslint-plugin-prettier": "4.2.1",
|
||||
"nodemon": "2.0.20",
|
||||
"prettier": "2.7.1",
|
||||
"prettier": "2.8.2",
|
||||
"rimraf": "3.0.2",
|
||||
"tsconfig-paths": "4.1.0",
|
||||
"tsconfig-paths": "4.1.2",
|
||||
"types-fastify-socket.io": "0.0.1",
|
||||
"typescript": "4.9.3"
|
||||
"typescript": "4.9.4"
|
||||
},
|
||||
"prisma": {
|
||||
"seed": "node prisma/seed.js"
|
||||
|
||||
@@ -0,0 +1,27 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_GitSource" (
"id" TEXT NOT NULL PRIMARY KEY,
"name" TEXT NOT NULL,
"forPublic" BOOLEAN NOT NULL DEFAULT false,
"type" TEXT,
"apiUrl" TEXT,
"htmlUrl" TEXT,
"customPort" INTEGER NOT NULL DEFAULT 22,
"customUser" TEXT NOT NULL DEFAULT 'git',
"organization" TEXT,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
"githubAppId" TEXT,
"gitlabAppId" TEXT,
"isSystemWide" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "GitSource_gitlabAppId_fkey" FOREIGN KEY ("gitlabAppId") REFERENCES "GitlabApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "GitSource_githubAppId_fkey" FOREIGN KEY ("githubAppId") REFERENCES "GithubApp" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_GitSource" ("apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "isSystemWide", "name", "organization", "type", "updatedAt") SELECT "apiUrl", "createdAt", "customPort", "forPublic", "githubAppId", "gitlabAppId", "htmlUrl", "id", "isSystemWide", "name", "organization", "type", "updatedAt" FROM "GitSource";
DROP TABLE "GitSource";
ALTER TABLE "new_GitSource" RENAME TO "GitSource";
CREATE UNIQUE INDEX "GitSource_githubAppId_key" ON "GitSource"("githubAppId");
CREATE UNIQUE INDEX "GitSource_gitlabAppId_key" ON "GitSource"("gitlabAppId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -0,0 +1,24 @@
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_ApplicationSettings" (
"id" TEXT NOT NULL PRIMARY KEY,
"applicationId" TEXT NOT NULL,
"dualCerts" BOOLEAN NOT NULL DEFAULT false,
"debug" BOOLEAN NOT NULL DEFAULT false,
"previews" BOOLEAN NOT NULL DEFAULT false,
"autodeploy" BOOLEAN NOT NULL DEFAULT true,
"isBot" BOOLEAN NOT NULL DEFAULT false,
"isPublicRepository" BOOLEAN NOT NULL DEFAULT false,
"isDBBranching" BOOLEAN NOT NULL DEFAULT false,
"isCustomSSL" BOOLEAN NOT NULL DEFAULT false,
"isHttp2" BOOLEAN NOT NULL DEFAULT false,
"createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" DATETIME NOT NULL,
CONSTRAINT "ApplicationSettings_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "Application" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_ApplicationSettings" ("applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isPublicRepository", "previews", "updatedAt") SELECT "applicationId", "autodeploy", "createdAt", "debug", "dualCerts", "id", "isBot", "isCustomSSL", "isDBBranching", "isPublicRepository", "previews", "updatedAt" FROM "ApplicationSettings";
DROP TABLE "ApplicationSettings";
ALTER TABLE "new_ApplicationSettings" RENAME TO "ApplicationSettings";
CREATE UNIQUE INDEX "ApplicationSettings_applicationId_key" ON "ApplicationSettings"("applicationId");
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;
@@ -186,6 +186,7 @@ model ApplicationSettings {
isPublicRepository Boolean @default(false)
isDBBranching Boolean @default(false)
isCustomSSL Boolean @default(false)
isHttp2 Boolean @default(false)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
application Application @relation(fields: [applicationId], references: [id])
@@ -325,6 +326,7 @@ model GitSource {
apiUrl String?
htmlUrl String?
customPort Int @default(22)
customUser String @default("git")
organization String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -156,7 +156,7 @@ const host = '0.0.0.0';
|
||||
graceful.listen();
|
||||
|
||||
setInterval(async () => {
|
||||
if (!scheduler.workers.has('deployApplication')) {
|
||||
if (!scheduler.workers.has('deployApplication')) {
|
||||
scheduler.run('deployApplication');
|
||||
}
|
||||
}, 2000);
|
||||
@@ -171,6 +171,11 @@ const host = '0.0.0.0';
|
||||
await cleanupStorage();
|
||||
}, 60000 * 15);
|
||||
|
||||
// Cleanup stucked containers (not defined in Coolify, but still running and managed by Coolify)
|
||||
setInterval(async () => {
|
||||
await cleanupStuckedContainers();
|
||||
}, 60000);
|
||||
|
||||
// checkProxies, checkFluentBit & refresh templates
|
||||
setInterval(async () => {
|
||||
await checkProxies();
|
||||
@@ -197,7 +202,13 @@ const host = '0.0.0.0';
|
||||
await copySSLCertificates();
|
||||
}, 10000);
|
||||
|
||||
await Promise.all([getTagsTemplates(), getArch(), getIPAddress(), configureRemoteDockers()]);
|
||||
await Promise.all([
|
||||
getTagsTemplates(),
|
||||
getArch(),
|
||||
getIPAddress(),
|
||||
configureRemoteDockers()
|
||||
// cleanupStuckedContainers()
|
||||
]);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
@@ -225,8 +236,22 @@ async function getTagsTemplates() {
|
||||
const { default: got } = await import('got');
|
||||
try {
|
||||
if (isDev) {
|
||||
const templates = await fs.readFile('./devTemplates.yaml', 'utf8');
|
||||
const tags = await fs.readFile('./devTags.json', 'utf8');
|
||||
let templates = await fs.readFile('./devTemplates.yaml', 'utf8');
|
||||
let tags = await fs.readFile('./devTags.json', 'utf8');
|
||||
try {
|
||||
if (await fs.stat('./testTemplate.yaml')) {
|
||||
templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8'));
|
||||
}
|
||||
} catch (error) {}
|
||||
try {
|
||||
if (await fs.stat('./testTags.json')) {
|
||||
const testTags = await fs.readFile('./testTags.json', 'utf8');
|
||||
if (testTags.length > 0) {
|
||||
tags = JSON.stringify(JSON.parse(tags).concat(JSON.parse(testTags)));
|
||||
}
|
||||
}
|
||||
} catch (error) {}
|
||||
|
||||
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates)));
|
||||
await fs.writeFile('./tags.json', tags);
|
||||
console.log('[004] Tags and templates loaded in dev mode...');
|
||||
@@ -297,6 +322,49 @@ async function getArch() {
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
async function cleanupStuckedContainers() {
|
||||
try {
|
||||
const destinationDockers = await prisma.destinationDocker.findMany();
|
||||
let enginesDone = new Set();
|
||||
for (const destination of destinationDockers) {
|
||||
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress))
|
||||
return;
|
||||
if (destination.engine) {
|
||||
enginesDone.add(destination.engine);
|
||||
}
|
||||
if (destination.remoteIpAddress) {
|
||||
if (!destination.remoteVerified) continue;
|
||||
enginesDone.add(destination.remoteIpAddress);
|
||||
}
|
||||
const { stdout: containers } = await executeCommand({
|
||||
dockerId: destination.id,
|
||||
command: `docker container ps -a --filter "label=coolify.managed=true" --format '{{ .Names}}'`
|
||||
});
|
||||
if (containers) {
|
||||
const containersArray = containers.trim().split('\n');
|
||||
if (containersArray.length > 0) {
|
||||
for (const container of containersArray) {
|
||||
const containerId = container.split('-')[0];
|
||||
const application = await prisma.application.findFirst({
|
||||
where: { id: { startsWith: containerId } }
|
||||
});
|
||||
const service = await prisma.service.findFirst({
|
||||
where: { id: { startsWith: containerId } }
|
||||
});
|
||||
const database = await prisma.database.findFirst({
|
||||
where: { id: { startsWith: containerId } }
|
||||
});
|
||||
if (!application && !service && !database) {
|
||||
await executeCommand({ command: `docker container rm -f ${container}` });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
async function configureRemoteDockers() {
|
||||
try {
|
||||
const remoteDocker = await prisma.destinationDocker.findMany({
|
||||
@@ -529,9 +597,13 @@ async function cleanupStorage() {
|
||||
let enginesDone = new Set();
|
||||
for (const destination of destinationDockers) {
|
||||
if (enginesDone.has(destination.engine) || enginesDone.has(destination.remoteIpAddress)) return;
|
||||
if (destination.engine) enginesDone.add(destination.engine);
|
||||
if (destination.remoteIpAddress) enginesDone.add(destination.remoteIpAddress);
|
||||
let force = false;
|
||||
if (destination.engine) {
|
||||
enginesDone.add(destination.engine);
|
||||
}
|
||||
if (destination.remoteIpAddress) {
|
||||
if (!destination.remoteVerified) continue;
|
||||
enginesDone.add(destination.remoteIpAddress);
|
||||
}
|
||||
let lowDiskSpace = false;
|
||||
try {
|
||||
let stdout = null;
|
||||
@@ -577,6 +649,8 @@ async function cleanupStorage() {
|
||||
}
|
||||
}
|
||||
} catch (error) {}
|
||||
await cleanupDockerStorage(destination.id, lowDiskSpace, force);
|
||||
if (lowDiskSpace) {
|
||||
await cleanupDockerStorage(destination.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -196,7 +196,7 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
await executeCommand({
|
||||
debug: true,
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
|
||||
});
|
||||
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
|
||||
} catch (error) {
|
||||
@@ -419,6 +419,7 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
githubAppId: gitSource.githubApp?.id,
|
||||
gitlabAppId: gitSource.gitlabApp?.id,
|
||||
customPort: gitSource.customPort,
|
||||
customUser: gitSource.customUser,
|
||||
gitCommitHash,
|
||||
configuration,
|
||||
repository,
|
||||
@@ -600,6 +601,7 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
}
|
||||
|
||||
if (buildPack === 'compose') {
|
||||
const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
|
||||
try {
|
||||
const { stdout: containers } = await executeCommand({
|
||||
dockerId: destinationDockerId,
|
||||
@@ -629,7 +631,7 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
command: `docker compose --project-directory ${workdir} -f ${fileYaml} up -d`
|
||||
});
|
||||
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
|
||||
await prisma.build.update({
|
||||
@@ -724,7 +726,7 @@ import * as buildpacks from '../lib/buildPacks';
|
||||
await executeCommand({
|
||||
debug,
|
||||
dockerId: destinationDocker.id,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
|
||||
});
|
||||
await saveBuildLog({ line: 'Deployed 🎉', buildId, applicationId });
|
||||
} catch (error) {
|
||||
|
||||
@@ -26,8 +26,10 @@ export default async function (data) {
|
||||
throw 'No Services found in docker-compose file.';
|
||||
}
|
||||
let envs = [];
|
||||
let buildEnvs = [];
|
||||
if (secrets.length > 0) {
|
||||
envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
|
||||
buildEnvs = [...buildEnvs, ...generateSecrets(secrets, pullmergeRequestId, true, null, true)];
|
||||
}
|
||||
|
||||
const composeVolumes = [];
|
||||
@@ -43,8 +45,34 @@ export default async function (data) {
|
||||
let networks = {};
|
||||
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
|
||||
value['container_name'] = `${applicationId}-${key}`;
|
||||
let environment = typeof value['environment'] === 'undefined' ? [] : value['environment']
|
||||
|
||||
let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
|
||||
if (Object.keys(environment).length > 0) {
|
||||
environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`);
|
||||
}
|
||||
value['environment'] = [...environment, ...envs];
|
||||
|
||||
let build = typeof value['build'] === 'undefined' ? [] : value['build'];
|
||||
if (typeof build === 'string') {
|
||||
build = { context: build };
|
||||
}
|
||||
const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
|
||||
let finalArgs = [...buildEnvs];
|
||||
if (Object.keys(buildArgs).length > 0) {
|
||||
for (const arg of buildArgs) {
|
||||
const [key, _] = arg.split('=');
|
||||
if (finalArgs.filter((env) => env.startsWith(key)).length === 0) {
|
||||
finalArgs.push(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (build.length > 0 || buildArgs.length > 0 ) {
|
||||
value['build'] = {
|
||||
...build,
|
||||
args: finalArgs
|
||||
};
|
||||
}
|
||||
|
||||
value['labels'] = labels;
|
||||
// TODO: If we support separated volume for each service, we need to add it here
|
||||
if (value['volumes']?.length > 0) {
|
||||
@@ -95,7 +123,7 @@ export default async function (data) {
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId,
|
||||
command: `docker compose --project-directory ${workdir} pull`
|
||||
command: `docker compose --project-directory ${workdir} -f ${fileYaml} pull`
|
||||
});
|
||||
await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
|
||||
await executeCommand({
|
||||
@@ -103,7 +131,7 @@ export default async function (data) {
|
||||
buildId,
|
||||
applicationId,
|
||||
dockerId,
|
||||
command: `docker compose --project-directory ${workdir} build --progress plain`
|
||||
command: `docker compose --project-directory ${workdir} -f ${fileYaml} build --progress plain`
|
||||
});
|
||||
await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
|
||||
}
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { generateSecrets } from '../common';
|
||||
import { buildCacheImageForLaravel, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { workdir, applicationId, tag, buildId, port } = data;
|
||||
const { workdir, applicationId, tag, buildId, port, secrets, pullmergeRequestId } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
if (secrets.length > 0) {
|
||||
generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
|
||||
Dockerfile.push(env);
|
||||
});
|
||||
}
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
|
||||
Dockerfile.push(`COPY --chown=application:application composer.* ./`);
|
||||
|
||||
@@ -19,7 +19,7 @@ import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common
|
||||
import { scheduler } from './scheduler';
|
||||
import type { ExecaChildProcess } from 'execa';
|
||||
|
||||
export const version = '3.12.8';
|
||||
export const version = '3.12.18';
|
||||
export const isDev = process.env.NODE_ENV === 'development';
|
||||
export const sentryDSN =
|
||||
'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216';
|
||||
@@ -714,8 +714,10 @@ export async function startTraefikProxy(id: string): Promise<void> {
|
||||
--network coolify-infra \
|
||||
-p "80:80" \
|
||||
-p "443:443" \
|
||||
${isDev ? '-p "8080:8080"' : ''} \
|
||||
--name coolify-proxy \
|
||||
-d ${defaultTraefikImage} \
|
||||
${isDev ? '--api.insecure=true' : ''} \
|
||||
--entrypoints.web.address=:80 \
|
||||
--entrypoints.web.forwardedHeaders.insecure=true \
|
||||
--entrypoints.websecure.address=:443 \
|
||||
@@ -1712,78 +1714,24 @@ export function convertTolOldVolumeNames(type) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function cleanupDockerStorage(dockerId, lowDiskSpace, force) {
|
||||
// Cleanup old coolify images
|
||||
export async function cleanupDockerStorage(dockerId) {
|
||||
// Cleanup images that are not used by any container
|
||||
try {
|
||||
let { stdout: images } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker images coollabsio/coolify --filter before="coollabsio/coolify:${version}" -q | xargs -r`,
|
||||
shell: true
|
||||
});
|
||||
|
||||
images = images.trim();
|
||||
if (images) {
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker rmi -f ${images}" -q | xargs -r`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
await executeCommand({ dockerId, command: `docker image prune -af` });
|
||||
} catch (error) {}
|
||||
if (lowDiskSpace || force) {
|
||||
// Cleanup images that are not used
|
||||
try {
|
||||
await executeCommand({ dockerId, command: `docker image prune -f` });
|
||||
} catch (error) {}
|
||||
|
||||
const { numberOfDockerImagesKeptLocally } = await prisma.setting.findUnique({
|
||||
where: { id: '0' }
|
||||
});
|
||||
const { stdout: images } = await executeCommand({
|
||||
// Prune coolify managed containers
|
||||
try {
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker images|grep -v "<none>"|grep -v REPOSITORY|awk '{print $1, $2}'`,
|
||||
shell: true
|
||||
command: `docker container prune -f --filter "label=coolify.managed=true"`
|
||||
});
|
||||
const imagesArray = images.trim().replaceAll(' ', ':').split('\n');
|
||||
const imagesSet = new Set(imagesArray.map((image) => image.split(':')[0]));
|
||||
let deleteImage = [];
|
||||
for (const image of imagesSet) {
|
||||
let keepImage = [];
|
||||
for (const image2 of imagesArray) {
|
||||
if (image2.startsWith(image)) {
|
||||
if (force) {
|
||||
deleteImage.push(image2);
|
||||
continue;
|
||||
}
|
||||
if (keepImage.length >= numberOfDockerImagesKeptLocally) {
|
||||
deleteImage.push(image2);
|
||||
} else {
|
||||
keepImage.push(image2);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const image of deleteImage) {
|
||||
try {
|
||||
await executeCommand({ dockerId, command: `docker image rm -f ${image}` });
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
} catch (error) {}
|
||||
|
||||
// Prune coolify managed containers
|
||||
try {
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker container prune -f --filter "label=coolify.managed=true"`
|
||||
});
|
||||
} catch (error) {}
|
||||
|
||||
// Cleanup build caches
|
||||
try {
|
||||
await executeCommand({ dockerId, command: `docker builder prune -a -f` });
|
||||
} catch (error) {}
|
||||
}
|
||||
// Cleanup build caches
|
||||
try {
|
||||
await executeCommand({ dockerId, command: `docker builder prune -af` });
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
export function persistentVolumes(id, persistentStorage, config) {
|
||||
@@ -1884,11 +1832,36 @@ export async function pushToRegistry(
|
||||
});
|
||||
}
|
||||
|
||||
function parseSecret(secret, isBuild) {
|
||||
if (secret.value.includes('$')) {
|
||||
secret.value = secret.value.replaceAll('$', '$$$$');
|
||||
}
|
||||
if (secret.value.includes('\\n')) {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}=${secret.value}`;
|
||||
} else {
|
||||
return `${secret.name}=${secret.value}`;
|
||||
}
|
||||
} else if (secret.value.includes(' ')) {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}='${secret.value}'`;
|
||||
} else {
|
||||
return `${secret.name}='${secret.value}'`;
|
||||
}
|
||||
} else {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}=${secret.value}`;
|
||||
} else {
|
||||
return `${secret.name}=${secret.value}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
export function generateSecrets(
|
||||
secrets: Array<any>,
|
||||
pullmergeRequestId: string,
|
||||
isBuild = false,
|
||||
port = null
|
||||
port = null,
|
||||
compose = false
|
||||
): Array<string> {
|
||||
const envs = [];
|
||||
const isPRMRSecret = secrets.filter((s) => s.isPRMRSecret);
|
||||
@@ -1899,15 +1872,7 @@ export function generateSecrets(
|
||||
return;
|
||||
}
|
||||
const build = isBuild && secret.isBuildSecret;
|
||||
if (build) {
|
||||
if (secret.value.includes(' ') || secret.value.includes('\\n')) {
|
||||
envs.push(`ARG ${secret.name}='${secret.value}'`);
|
||||
} else {
|
||||
envs.push(`ARG ${secret.name}=${secret.value}`);
|
||||
}
|
||||
} else {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
envs.push(parseSecret(secret, compose ? false : build));
|
||||
});
|
||||
}
|
||||
if (!pullmergeRequestId && normalSecrets.length > 0) {
|
||||
@@ -1916,15 +1881,7 @@ export function generateSecrets(
|
||||
return;
|
||||
}
|
||||
const build = isBuild && secret.isBuildSecret;
|
||||
if (build) {
|
||||
if (secret.value.includes(' ') || secret.value.includes('\\n')) {
|
||||
envs.push(`ARG ${secret.name}='${secret.value}'`);
|
||||
} else {
|
||||
envs.push(`ARG ${secret.name}=${secret.value}`);
|
||||
}
|
||||
} else {
|
||||
envs.push(`${secret.name}=${secret.value}`);
|
||||
}
|
||||
envs.push(parseSecret(secret, compose ? false : build));
|
||||
});
|
||||
}
|
||||
const portFound = envs.filter((env) => env.startsWith('PORT'));
|
||||
|
||||
@@ -12,7 +12,8 @@ export default async function ({
|
||||
buildId,
|
||||
privateSshKey,
|
||||
customPort,
|
||||
forPublic
|
||||
forPublic,
|
||||
customUser,
|
||||
}: {
|
||||
applicationId: string;
|
||||
workdir: string;
|
||||
@@ -25,6 +26,7 @@ export default async function ({
|
||||
privateSshKey: string;
|
||||
customPort: number;
|
||||
forPublic: boolean;
|
||||
customUser: string;
|
||||
}): Promise<string> {
|
||||
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
|
||||
if (!forPublic) {
|
||||
@@ -53,7 +55,7 @@ export default async function ({
|
||||
} else {
|
||||
await executeCommand({
|
||||
command:
|
||||
`git clone -q -b ${branch} git@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
|
||||
`git clone -q -b ${branch} ${customUser}@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -122,6 +122,9 @@ export async function cleanupUnconfiguredApplications(request: FastifyRequest<an
|
||||
include: { settings: true, destinationDocker: true, teams: true }
|
||||
});
|
||||
for (const application of applications) {
|
||||
if (application?.buildPack === 'compose') {
|
||||
continue;
|
||||
}
|
||||
if (
|
||||
!application.buildPack ||
|
||||
!application.destinationDockerId ||
|
||||
@@ -500,14 +503,24 @@ export async function saveApplicationSettings(
|
||||
projectId,
|
||||
isBot,
|
||||
isDBBranching,
|
||||
isCustomSSL
|
||||
isCustomSSL,
|
||||
isHttp2
|
||||
} = request.body;
|
||||
await prisma.application.update({
|
||||
where: { id },
|
||||
data: {
|
||||
fqdn: isBot ? null : undefined,
|
||||
settings: {
|
||||
update: { debug, previews, dualCerts, autodeploy, isBot, isDBBranching, isCustomSSL }
|
||||
update: {
|
||||
debug,
|
||||
previews,
|
||||
dualCerts,
|
||||
autodeploy,
|
||||
isBot,
|
||||
isDBBranching,
|
||||
isCustomSSL,
|
||||
isHttp2
|
||||
}
|
||||
}
|
||||
},
|
||||
include: { destinationDocker: true }
|
||||
@@ -670,7 +683,7 @@ export async function restartApplication(
|
||||
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
|
||||
});
|
||||
return reply.code(201).send();
|
||||
}
|
||||
@@ -719,14 +732,15 @@ export async function deleteApplication(
|
||||
) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const { force } = request.body;
|
||||
|
||||
const { teamId } = request.user;
|
||||
const application = await prisma.application.findUnique({
|
||||
where: { id },
|
||||
include: { destinationDocker: true }
|
||||
include: { destinationDocker: true, teams: true }
|
||||
});
|
||||
if (!force && application?.destinationDockerId && application.destinationDocker?.network) {
|
||||
if (!application.teams.find((team) => team.id === teamId) || teamId !== '0') {
|
||||
throw { status: 403, message: 'You are not allowed to delete this application.' };
|
||||
}
|
||||
if (application?.destinationDocker?.id && application.destinationDocker?.network) {
|
||||
const { stdout: containers } = await executeCommand({
|
||||
dockerId: application.destinationDocker.id,
|
||||
command: `docker ps -a --filter network=${application.destinationDocker.network} --filter name=${id} --format '{{json .}}'`
|
||||
@@ -746,6 +760,7 @@ export async function deleteApplication(
|
||||
await prisma.secret.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: id } });
|
||||
await prisma.previewApplication.deleteMany({ where: { applicationId: id } });
|
||||
if (teamId === '0') {
|
||||
await prisma.application.deleteMany({ where: { id } });
|
||||
} else {
|
||||
@@ -764,7 +779,9 @@ export async function checkDomain(request: FastifyRequest<CheckDomain>) {
|
||||
fqdn,
|
||||
settings: { dualCerts }
|
||||
} = await prisma.application.findUnique({ where: { id }, include: { settings: true } });
|
||||
return await checkDomainsIsValidInDNS({ hostname: domain, fqdn, dualCerts });
|
||||
// TODO: Disabled this because it is having problems with remote docker engines.
|
||||
// return await checkDomainsIsValidInDNS({ hostname: domain, fqdn, dualCerts });
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
@@ -805,11 +822,12 @@ export async function checkDNS(request: FastifyRequest<CheckDNS>) {
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
});
|
||||
if (isDNSCheckEnabled && !isDev && !forceSave) {
|
||||
let hostname = request.hostname.split(':')[0];
|
||||
if (remoteEngine) hostname = remoteIpAddress;
|
||||
return await checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts });
|
||||
}
|
||||
// TODO: Disabled this because it is having problems with remote docker engines.
|
||||
// if (isDNSCheckEnabled && !isDev && !forceSave) {
|
||||
// let hostname = request.hostname.split(':')[0];
|
||||
// if (remoteEngine) hostname = remoteIpAddress;
|
||||
// return await checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts });
|
||||
// }
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
@@ -842,15 +860,16 @@ export async function getDockerImages(request) {
|
||||
try {
|
||||
const { stdout } = await executeCommand({
|
||||
dockerId: application.destinationDocker.id,
|
||||
command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}' | grep -i ${id} | grep -v cache`,
|
||||
shell: true
|
||||
command: `docker images --format '{{.Repository}}#{{.Tag}}#{{.CreatedAt}}'`
|
||||
});
|
||||
const { stdout: runningImage } = await executeCommand({
|
||||
dockerId: application.destinationDocker.id,
|
||||
command: `docker ps -a --filter 'label=com.docker.compose.service=${id}' --format {{.Image}}`
|
||||
});
|
||||
const images = stdout.trim().split('\n');
|
||||
|
||||
const images = stdout
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((image) => image.includes(id) && !image.includes('-cache'));
|
||||
for (const image of images) {
|
||||
const [repository, tag, createdAt] = image.split('#');
|
||||
if (tag.includes('-')) {
|
||||
@@ -871,6 +890,7 @@ export async function getDockerImages(request) {
|
||||
runningImage
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
imagesAvailables
|
||||
};
|
||||
@@ -1446,7 +1466,7 @@ export async function restartPreview(
|
||||
await executeCommand({ dockerId, command: `docker rm ${id}-${pullmergeRequestId}` });
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker compose --project-directory ${workdir} up -d`
|
||||
command: `docker compose --project-directory ${workdir} -f ${workdir}/docker-compose.yml up -d`
|
||||
});
|
||||
return reply.code(201).send();
|
||||
}
|
||||
@@ -1600,12 +1620,7 @@ export async function getApplicationLogs(request: FastifyRequest<GetApplicationL
|
||||
.split('\n')
|
||||
.map((l) => ansi(l))
|
||||
.filter((a) => a);
|
||||
const logs = stripLogsStderr.concat(stripLogsStdout);
|
||||
const sortedLogs = logs.sort((a, b) =>
|
||||
day(a.split(' ')[0]).isAfter(day(b.split(' ')[0])) ? 1 : -1
|
||||
);
|
||||
return { logs: sortedLogs };
|
||||
// }
|
||||
return { logs: stripLogsStderr.concat(stripLogsStdout) };
|
||||
} catch (error) {
|
||||
const { statusCode, stderr } = error;
|
||||
if (stderr.startsWith('Error: No such container')) {
|
||||
|
||||
@@ -1,154 +1,170 @@
|
||||
import type { OnlyId } from "../../../../types";
|
||||
import type { OnlyId } from '../../../../types';
|
||||
|
||||
export interface SaveApplication extends OnlyId {
|
||||
Body: {
|
||||
name: string,
|
||||
buildPack: string,
|
||||
fqdn: string,
|
||||
port: number,
|
||||
exposePort: number,
|
||||
installCommand: string,
|
||||
buildCommand: string,
|
||||
startCommand: string,
|
||||
baseDirectory: string,
|
||||
publishDirectory: string,
|
||||
pythonWSGI: string,
|
||||
pythonModule: string,
|
||||
pythonVariable: string,
|
||||
dockerFileLocation: string,
|
||||
denoMainFile: string,
|
||||
denoOptions: string,
|
||||
baseImage: string,
|
||||
gitCommitHash: string,
|
||||
baseBuildImage: string,
|
||||
deploymentType: string,
|
||||
baseDatabaseBranch: string,
|
||||
dockerComposeFile: string,
|
||||
dockerComposeFileLocation: string,
|
||||
dockerComposeConfiguration: string,
|
||||
simpleDockerfile: string,
|
||||
dockerRegistryImageName: string
|
||||
}
|
||||
Body: {
|
||||
name: string;
|
||||
buildPack: string;
|
||||
fqdn: string;
|
||||
port: number;
|
||||
exposePort: number;
|
||||
installCommand: string;
|
||||
buildCommand: string;
|
||||
startCommand: string;
|
||||
baseDirectory: string;
|
||||
publishDirectory: string;
|
||||
pythonWSGI: string;
|
||||
pythonModule: string;
|
||||
pythonVariable: string;
|
||||
dockerFileLocation: string;
|
||||
denoMainFile: string;
|
||||
denoOptions: string;
|
||||
baseImage: string;
|
||||
gitCommitHash: string;
|
||||
baseBuildImage: string;
|
||||
deploymentType: string;
|
||||
baseDatabaseBranch: string;
|
||||
dockerComposeFile: string;
|
||||
dockerComposeFileLocation: string;
|
||||
dockerComposeConfiguration: string;
|
||||
simpleDockerfile: string;
|
||||
dockerRegistryImageName: string;
|
||||
};
|
||||
}
|
||||
export interface SaveApplicationSettings extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
Body: { debug: boolean; previews: boolean; dualCerts: boolean; autodeploy: boolean; branch: string; projectId: number; isBot: boolean; isDBBranching: boolean, isCustomSSL: boolean };
|
||||
Querystring: { domain: string };
|
||||
Body: {
|
||||
debug: boolean;
|
||||
previews: boolean;
|
||||
dualCerts: boolean;
|
||||
autodeploy: boolean;
|
||||
branch: string;
|
||||
projectId: number;
|
||||
isBot: boolean;
|
||||
isDBBranching: boolean;
|
||||
isCustomSSL: boolean;
|
||||
isHttp2: boolean;
|
||||
};
|
||||
}
|
||||
export interface DeleteApplication extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
Body: { force: boolean }
|
||||
Querystring: { domain: string };
|
||||
Body: { force: boolean };
|
||||
}
|
||||
export interface CheckDomain extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
Querystring: { domain: string };
|
||||
}
|
||||
export interface CheckDNS extends OnlyId {
|
||||
Querystring: { domain: string; };
|
||||
Body: {
|
||||
exposePort: number,
|
||||
fqdn: string,
|
||||
forceSave: boolean,
|
||||
dualCerts: boolean
|
||||
}
|
||||
Querystring: { domain: string };
|
||||
Body: {
|
||||
exposePort: number;
|
||||
fqdn: string;
|
||||
forceSave: boolean;
|
||||
dualCerts: boolean;
|
||||
};
|
||||
}
|
||||
export interface DeployApplication {
|
||||
Querystring: { domain: string }
|
||||
Body: { pullmergeRequestId: string | null, branch: string, forceRebuild?: boolean }
|
||||
Querystring: { domain: string };
|
||||
Body: { pullmergeRequestId: string | null; branch: string; forceRebuild?: boolean };
|
||||
}
|
||||
export interface GetImages {
|
||||
Body: { buildPack: string, deploymentType: string }
|
||||
Body: { buildPack: string; deploymentType: string };
|
||||
}
|
||||
export interface SaveApplicationSource extends OnlyId {
|
||||
Body: { gitSourceId?: string | null, forPublic?: boolean, type?: string, simpleDockerfile?: string }
|
||||
Body: {
|
||||
gitSourceId?: string | null;
|
||||
forPublic?: boolean;
|
||||
type?: string;
|
||||
simpleDockerfile?: string;
|
||||
};
|
||||
}
|
||||
export interface CheckRepository extends OnlyId {
|
||||
Querystring: { repository: string, branch: string }
|
||||
Querystring: { repository: string; branch: string };
|
||||
}
|
||||
export interface SaveDestination extends OnlyId {
|
||||
Body: { destinationId: string }
|
||||
Body: { destinationId: string };
|
||||
}
|
||||
export interface SaveSecret extends OnlyId {
|
||||
Body: {
|
||||
name: string,
|
||||
value: string,
|
||||
isBuildSecret: boolean,
|
||||
previewSecret: boolean,
|
||||
isNew: boolean
|
||||
}
|
||||
Body: {
|
||||
name: string;
|
||||
value: string;
|
||||
isBuildSecret: boolean;
|
||||
previewSecret: boolean;
|
||||
isNew: boolean;
|
||||
};
|
||||
}
|
||||
export interface DeleteSecret extends OnlyId {
|
||||
Body: { name: string }
|
||||
Body: { name: string };
|
||||
}
|
||||
export interface SaveStorage extends OnlyId {
|
||||
Body: {
|
||||
path: string,
|
||||
newStorage: boolean,
|
||||
storageId: string
|
||||
}
|
||||
Body: {
|
||||
path: string;
|
||||
newStorage: boolean;
|
||||
storageId: string;
|
||||
};
|
||||
}
|
||||
export interface DeleteStorage extends OnlyId {
|
||||
Body: {
|
||||
path: string,
|
||||
}
|
||||
Body: {
|
||||
path: string;
|
||||
};
|
||||
}
|
||||
export interface GetApplicationLogs {
|
||||
Params: {
|
||||
id: string,
|
||||
containerId: string
|
||||
}
|
||||
Querystring: {
|
||||
since: number,
|
||||
}
|
||||
Params: {
|
||||
id: string;
|
||||
containerId: string;
|
||||
};
|
||||
Querystring: {
|
||||
since: number;
|
||||
};
|
||||
}
|
||||
export interface GetBuilds extends OnlyId {
|
||||
Querystring: {
|
||||
buildId: string
|
||||
skip: number,
|
||||
}
|
||||
Querystring: {
|
||||
buildId: string;
|
||||
skip: number;
|
||||
};
|
||||
}
|
||||
export interface GetBuildIdLogs {
|
||||
Params: {
|
||||
id: string,
|
||||
buildId: string
|
||||
},
|
||||
Querystring: {
|
||||
sequence: number
|
||||
}
|
||||
Params: {
|
||||
id: string;
|
||||
buildId: string;
|
||||
};
|
||||
Querystring: {
|
||||
sequence: number;
|
||||
};
|
||||
}
|
||||
export interface SaveDeployKey extends OnlyId {
|
||||
Body: {
|
||||
deployKeyId: number
|
||||
}
|
||||
Body: {
|
||||
deployKeyId: number;
|
||||
};
|
||||
}
|
||||
export interface CancelDeployment {
|
||||
Body: {
|
||||
buildId: string,
|
||||
applicationId: string
|
||||
}
|
||||
Body: {
|
||||
buildId: string;
|
||||
applicationId: string;
|
||||
};
|
||||
}
|
||||
export interface DeployApplication extends OnlyId {
|
||||
Body: {
|
||||
pullmergeRequestId: string | null,
|
||||
branch: string,
|
||||
forceRebuild?: boolean
|
||||
}
|
||||
Body: {
|
||||
pullmergeRequestId: string | null;
|
||||
branch: string;
|
||||
forceRebuild?: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export interface StopPreviewApplication extends OnlyId {
|
||||
Body: {
|
||||
pullmergeRequestId: string | null,
|
||||
}
|
||||
Body: {
|
||||
pullmergeRequestId: string | null;
|
||||
};
|
||||
}
|
||||
export interface RestartPreviewApplication {
|
||||
Params: {
|
||||
id: string,
|
||||
pullmergeRequestId: string | null,
|
||||
}
|
||||
Params: {
|
||||
id: string;
|
||||
pullmergeRequestId: string | null;
|
||||
};
|
||||
}
|
||||
export interface RestartApplication {
|
||||
Params: {
|
||||
id: string,
|
||||
},
|
||||
Body: {
|
||||
imageId: string | null,
|
||||
}
|
||||
}
|
||||
Params: {
|
||||
id: string;
|
||||
};
|
||||
Body: {
|
||||
imageId: string | null;
|
||||
};
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -4,7 +4,7 @@ export interface SaveDatabaseType extends OnlyId {
Body: { type: string }
}
export interface DeleteDatabase extends OnlyId {
Body: { force: string }
Body: { }
}
export interface SaveVersion extends OnlyId {
Body: {
@@ -1,6 +1,6 @@
|
||||
import { compareVersions } from "compare-versions";
|
||||
import cuid from "cuid";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { compareVersions } from 'compare-versions';
|
||||
import cuid from 'cuid';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import fs from 'fs/promises';
|
||||
import yaml from 'js-yaml';
|
||||
import {
|
||||
@@ -13,12 +13,12 @@ import {
|
||||
uniqueName,
|
||||
version,
|
||||
sentryDSN,
|
||||
executeCommand,
|
||||
} from "../../../lib/common";
|
||||
import { scheduler } from "../../../lib/scheduler";
|
||||
import type { FastifyReply, FastifyRequest } from "fastify";
|
||||
import type { Login, Update } from ".";
|
||||
import type { GetCurrentUser } from "./types";
|
||||
executeCommand
|
||||
} from '../../../lib/common';
|
||||
import { scheduler } from '../../../lib/scheduler';
|
||||
import type { FastifyReply, FastifyRequest } from 'fastify';
|
||||
import type { Login, Update } from '.';
|
||||
import type { GetCurrentUser } from './types';
|
||||
|
||||
export async function hashPassword(password: string): Promise<string> {
|
||||
const saltRounds = 15;
|
||||
@@ -29,9 +29,9 @@ export async function backup(request: FastifyRequest) {
|
||||
try {
|
||||
const { backupData } = request.params;
|
||||
let std = null;
|
||||
const [id, backupType, type, zipped, storage] = backupData.split(':')
|
||||
console.log(id, backupType, type, zipped, storage)
|
||||
const database = await prisma.database.findUnique({ where: { id } })
|
||||
const [id, backupType, type, zipped, storage] = backupData.split(':');
|
||||
console.log(id, backupType, type, zipped, storage);
|
||||
const database = await prisma.database.findUnique({ where: { id } });
|
||||
if (database) {
|
||||
// await executeDockerCmd({
|
||||
// dockerId: database.destinationDockerId,
|
||||
@@ -40,8 +40,7 @@ export async function backup(request: FastifyRequest) {
|
||||
std = await executeCommand({
|
||||
dockerId: database.destinationDockerId,
|
||||
command: `docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v coolify-local-backup:/app/backups -e CONTAINERS_TO_BACKUP="${backupData}" coollabsio/backup`
|
||||
})
|
||||
|
||||
});
|
||||
}
|
||||
if (std.stdout) {
|
||||
return std.stdout;
|
||||
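backup() receives a single colon-delimited `backupData` parameter and destructures it into its parts before looking up the database. A minimal sketch of the expected shape; the concrete values below are hypothetical, only the `id:backupType:type:zipped:storage` layout comes from the hunk above:

// Sketch: `backupData` is a ':'-separated string; the sample value is made up.
const backupData = 'clfoobar123:database:postgresql:zipped:local';
const [id, backupType, type, zipped, storage] = backupData.split(':');
console.log({ id, backupType, type, zipped, storage });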
@@ -58,9 +57,9 @@ export async function cleanupManually(request: FastifyRequest) {
|
||||
try {
|
||||
const { serverId } = request.body;
|
||||
const destination = await prisma.destinationDocker.findUnique({
|
||||
where: { id: serverId },
|
||||
where: { id: serverId }
|
||||
});
|
||||
await cleanupDockerStorage(destination.id, true, true);
|
||||
await cleanupDockerStorage(destination.id);
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
@@ -68,17 +67,25 @@ export async function cleanupManually(request: FastifyRequest) {
|
||||
}
|
||||
export async function refreshTags() {
|
||||
try {
|
||||
const { default: got } = await import('got')
|
||||
const { default: got } = await import('got');
|
||||
try {
|
||||
if (isDev) {
|
||||
const tags = await fs.readFile('./devTags.json', 'utf8')
|
||||
await fs.writeFile('./tags.json', tags)
|
||||
let tags = await fs.readFile('./devTags.json', 'utf8');
|
||||
try {
|
||||
if (await fs.stat('./testTags.json')) {
|
||||
const testTags = await fs.readFile('./testTags.json', 'utf8');
|
||||
if (testTags.length > 0) {
|
||||
tags = JSON.parse(tags).concat(JSON.parse(testTags));
|
||||
}
|
||||
}
|
||||
} catch (error) {}
|
||||
await fs.writeFile('./tags.json', tags);
|
||||
} else {
|
||||
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text()
|
||||
await fs.writeFile('/app/tags.json', tags)
|
||||
const tags = await got.get('https://get.coollabs.io/coolify/service-tags.json').text();
|
||||
await fs.writeFile('/app/tags.json', tags);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
console.log(error);
|
||||
}
|
||||
|
||||
return {};
|
||||
@@ -88,17 +95,25 @@ export async function refreshTags() {
|
||||
}
|
||||
export async function refreshTemplates() {
|
||||
try {
|
||||
const { default: got } = await import('got')
|
||||
const { default: got } = await import('got');
|
||||
try {
|
||||
if (isDev) {
|
||||
const response = await fs.readFile('./devTemplates.yaml', 'utf8')
|
||||
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(response)))
|
||||
let templates = await fs.readFile('./devTemplates.yaml', 'utf8');
|
||||
try {
|
||||
if (await fs.stat('./testTemplate.yaml')) {
|
||||
templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8'));
|
||||
}
|
||||
} catch (error) {}
|
||||
const response = await fs.readFile('./devTemplates.yaml', 'utf8');
|
||||
await fs.writeFile('./templates.json', JSON.stringify(yaml.load(response)));
|
||||
} else {
|
||||
const response = await got.get('https://get.coollabs.io/coolify/service-templates.yaml').text()
|
||||
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)))
|
||||
const response = await got
|
||||
.get('https://get.coollabs.io/coolify/service-templates.yaml')
|
||||
.text();
|
||||
await fs.writeFile('/app/templates.json', JSON.stringify(yaml.load(response)));
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
console.log(error);
|
||||
}
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
@@ -107,28 +122,29 @@ export async function refreshTemplates() {
|
||||
}
|
||||
export async function checkUpdate(request: FastifyRequest) {
|
||||
try {
|
||||
const { default: got } = await import('got')
|
||||
const { default: got } = await import('got');
|
||||
const isStaging =
|
||||
request.hostname === "staging.coolify.io" ||
|
||||
request.hostname === "arm.coolify.io";
|
||||
request.hostname === 'staging.coolify.io' || request.hostname === 'arm.coolify.io';
|
||||
const currentVersion = version;
|
||||
const { coolify } = await got.get('https://get.coollabs.io/versions.json', {
|
||||
searchParams: {
|
||||
appId: process.env['COOLIFY_APP_ID'] || undefined,
|
||||
version: currentVersion
|
||||
}
|
||||
}).json()
|
||||
const { coolify } = await got
|
||||
.get('https://get.coollabs.io/versions.json', {
|
||||
searchParams: {
|
||||
appId: process.env['COOLIFY_APP_ID'] || undefined,
|
||||
version: currentVersion
|
||||
}
|
||||
})
|
||||
.json();
|
||||
const latestVersion = coolify.main.version;
|
||||
const isUpdateAvailable = compareVersions(latestVersion, currentVersion);
|
||||
if (isStaging) {
|
||||
return {
|
||||
isUpdateAvailable: true,
|
||||
latestVersion: "next",
|
||||
latestVersion: 'next'
|
||||
};
|
||||
}
|
||||
return {
|
||||
isUpdateAvailable: isStaging ? true : isUpdateAvailable === 1,
|
||||
latestVersion,
|
||||
latestVersion
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
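checkUpdate() treats an update as available only when `compareVersions(latestVersion, currentVersion)` returns 1, i.e. when the first argument is strictly greater. A quick illustration with the compare-versions package already imported by this file; the version numbers are arbitrary examples:

import { compareVersions } from 'compare-versions';

// compareVersions returns 1, 0 or -1 depending on how the first version relates to the second.
console.log(compareVersions('3.12.1', '3.12.0')); //  1 -> update available
console.log(compareVersions('3.12.0', '3.12.0')); //  0 -> up to date
console.log(compareVersions('3.11.9', '3.12.0')); // -1 -> already on a newer build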
@@ -142,8 +158,13 @@ export async function update(request: FastifyRequest<Update>) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` });
await executeCommand({ shell: true, command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"` });
await executeCommand({
command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
return {};
} else {
await asyncSleep(2000);
@@ -156,12 +177,12 @@ export async function update(request: FastifyRequest<Update>) {
|
||||
export async function resetQueue(request: FastifyRequest<any>) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
if (teamId === "0") {
|
||||
if (teamId === '0') {
|
||||
await prisma.build.updateMany({
|
||||
where: { status: { in: ["queued", "running"] } },
|
||||
data: { status: "canceled" },
|
||||
where: { status: { in: ['queued', 'running'] } },
|
||||
data: { status: 'canceled' }
|
||||
});
|
||||
scheduler.workers.get("deployApplication").postMessage("cancel");
|
||||
scheduler.workers.get('deployApplication').postMessage('cancel');
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
@@ -170,7 +191,7 @@ export async function resetQueue(request: FastifyRequest<any>) {
|
||||
export async function restartCoolify(request: FastifyRequest<any>) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
if (teamId === "0") {
|
||||
if (teamId === '0') {
|
||||
if (!isDev) {
|
||||
await executeCommand({ command: `docker restart coolify` });
|
||||
return {};
|
||||
@@ -180,7 +201,7 @@ export async function restartCoolify(request: FastifyRequest<any>) {
|
||||
}
|
||||
throw {
|
||||
status: 500,
|
||||
message: "You are not authorized to restart Coolify.",
|
||||
message: 'You are not authorized to restart Coolify.'
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
@@ -192,43 +213,52 @@ export async function showDashboard(request: FastifyRequest) {
|
||||
const userId = request.user.userId;
|
||||
const teamId = request.user.teamId;
|
||||
let applications = await prisma.application.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true },
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true }
|
||||
});
|
||||
const databases = await prisma.database.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true },
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { settings: true, destinationDocker: true, teams: true }
|
||||
});
|
||||
const services = await prisma.service.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, teams: true },
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { destinationDocker: true, teams: true }
|
||||
});
|
||||
const gitSources = await prisma.gitSource.findMany({
|
||||
where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
|
||||
include: { teams: true },
|
||||
where: {
|
||||
OR: [
|
||||
{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
{ isSystemWide: true }
|
||||
]
|
||||
},
|
||||
include: { teams: true }
|
||||
});
|
||||
const destinations = await prisma.destinationDocker.findMany({
|
||||
where: { teams: { some: { id: teamId === "0" ? undefined : teamId } } },
|
||||
include: { teams: true },
|
||||
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { teams: true }
|
||||
});
|
||||
const settings = await listSettings();
|
||||
|
||||
let foundUnconfiguredApplication = false;
|
||||
for (const application of applications) {
|
||||
if (((!application.buildPack || !application.branch) && !application.simpleDockerfile) || !application.destinationDockerId || (!application.settings?.isBot && !application?.fqdn) && application.buildPack !== "compose") {
|
||||
foundUnconfiguredApplication = true
|
||||
if (
|
||||
((!application.buildPack || !application.branch) && !application.simpleDockerfile) ||
|
||||
!application.destinationDockerId ||
|
||||
(!application.settings?.isBot && !application?.fqdn && application.buildPack !== 'compose')
|
||||
) {
|
||||
foundUnconfiguredApplication = true;
|
||||
}
|
||||
}
|
||||
let foundUnconfiguredService = false;
|
||||
for (const service of services) {
|
||||
if (!service.fqdn) {
|
||||
foundUnconfiguredService = true
|
||||
foundUnconfiguredService = true;
|
||||
}
|
||||
}
|
||||
let foundUnconfiguredDatabase = false;
|
||||
for (const database of databases) {
|
||||
if (!database.version) {
|
||||
foundUnconfiguredDatabase = true
|
||||
foundUnconfiguredDatabase = true;
|
||||
}
|
||||
}
|
||||
return {
|
||||
@@ -240,101 +270,94 @@ export async function showDashboard(request: FastifyRequest) {
|
||||
services,
|
||||
gitSources,
|
||||
destinations,
|
||||
settings,
|
||||
settings
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
|
||||
export async function login(
|
||||
request: FastifyRequest<Login>,
|
||||
reply: FastifyReply
|
||||
) {
|
||||
export async function login(request: FastifyRequest<Login>, reply: FastifyReply) {
|
||||
if (request.user) {
|
||||
return reply.redirect("/dashboard");
|
||||
return reply.redirect('/dashboard');
|
||||
} else {
|
||||
const { email, password, isLogin } = request.body || {};
|
||||
if (!email || !password) {
|
||||
throw { status: 500, message: "Email and password are required." };
|
||||
throw { status: 500, message: 'Email and password are required.' };
|
||||
}
|
||||
const users = await prisma.user.count();
|
||||
const userFound = await prisma.user.findUnique({
|
||||
where: { email },
|
||||
include: { teams: true, permission: true },
|
||||
rejectOnNotFound: false,
|
||||
rejectOnNotFound: false
|
||||
});
|
||||
if (!userFound && isLogin) {
|
||||
throw { status: 500, message: "User not found." };
|
||||
throw { status: 500, message: 'User not found.' };
|
||||
}
|
||||
const { isRegistrationEnabled, id } = await prisma.setting.findFirst();
|
||||
let uid = cuid();
|
||||
let permission = "read";
|
||||
let permission = 'read';
|
||||
let isAdmin = false;
|
||||
|
||||
if (users === 0) {
|
||||
await prisma.setting.update({
|
||||
where: { id },
|
||||
data: { isRegistrationEnabled: false },
|
||||
data: { isRegistrationEnabled: false }
|
||||
});
|
||||
uid = "0";
|
||||
uid = '0';
|
||||
}
|
||||
if (userFound) {
|
||||
if (userFound.type === "email") {
|
||||
if (userFound.password === "RESETME") {
|
||||
if (userFound.type === 'email') {
|
||||
if (userFound.password === 'RESETME') {
|
||||
const hashedPassword = await hashPassword(password);
|
||||
if (userFound.updatedAt < new Date(Date.now() - 1000 * 60 * 10)) {
|
||||
if (userFound.id === "0") {
|
||||
if (userFound.id === '0') {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: "RESETME" },
|
||||
data: { password: 'RESETME' }
|
||||
});
|
||||
} else {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: "RESETTIMEOUT" },
|
||||
data: { password: 'RESETTIMEOUT' }
|
||||
});
|
||||
}
|
||||
|
||||
throw {
|
||||
status: 500,
|
||||
message:
|
||||
"Password reset link has expired. Please request a new one.",
|
||||
message: 'Password reset link has expired. Please request a new one.'
|
||||
};
|
||||
} else {
|
||||
await prisma.user.update({
|
||||
where: { email: userFound.email },
|
||||
data: { password: hashedPassword },
|
||||
data: { password: hashedPassword }
|
||||
});
|
||||
return {
|
||||
userId: userFound.id,
|
||||
teamId: userFound.id,
|
||||
permission: userFound.permission,
|
||||
isAdmin: true,
|
||||
isAdmin: true
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const passwordMatch = await bcrypt.compare(
|
||||
password,
|
||||
userFound.password
|
||||
);
|
||||
const passwordMatch = await bcrypt.compare(password, userFound.password);
|
||||
if (!passwordMatch) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: "Wrong password or email address.",
|
||||
message: 'Wrong password or email address.'
|
||||
};
|
||||
}
|
||||
uid = userFound.id;
|
||||
isAdmin = true;
|
||||
}
|
||||
} else {
|
||||
permission = "owner";
|
||||
permission = 'owner';
|
||||
isAdmin = true;
|
||||
if (!isRegistrationEnabled) {
|
||||
throw {
|
||||
status: 404,
|
||||
message: "Registration disabled by administrator.",
|
||||
message: 'Registration disabled by administrator.'
|
||||
};
|
||||
}
|
||||
const hashedPassword = await hashPassword(password);
|
||||
@@ -344,17 +367,17 @@ export async function login(
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: "email",
|
||||
type: 'email',
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: uniqueName(),
|
||||
destinationDocker: { connect: { network: "coolify" } },
|
||||
},
|
||||
destinationDocker: { connect: { network: 'coolify' } }
|
||||
}
|
||||
},
|
||||
permission: { create: { teamId: uid, permission: "owner" } },
|
||||
permission: { create: { teamId: uid, permission: 'owner' } }
|
||||
},
|
||||
include: { teams: true },
|
||||
include: { teams: true }
|
||||
});
|
||||
} else {
|
||||
await prisma.user.create({
|
||||
@@ -362,16 +385,16 @@ export async function login(
|
||||
id: uid,
|
||||
email,
|
||||
password: hashedPassword,
|
||||
type: "email",
|
||||
type: 'email',
|
||||
teams: {
|
||||
create: {
|
||||
id: uid,
|
||||
name: uniqueName(),
|
||||
},
|
||||
name: uniqueName()
|
||||
}
|
||||
},
|
||||
permission: { create: { teamId: uid, permission: "owner" } },
|
||||
permission: { create: { teamId: uid, permission: 'owner' } }
|
||||
},
|
||||
include: { teams: true },
|
||||
include: { teams: true }
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -379,23 +402,20 @@ export async function login(
|
||||
userId: uid,
|
||||
teamId: uid,
|
||||
permission,
|
||||
isAdmin,
|
||||
isAdmin
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function getCurrentUser(
|
||||
request: FastifyRequest<GetCurrentUser>,
|
||||
fastify
|
||||
) {
|
||||
export async function getCurrentUser(request: FastifyRequest<GetCurrentUser>, fastify) {
|
||||
let token = null;
|
||||
const { teamId } = request.query;
|
||||
try {
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: request.user.userId },
|
||||
where: { id: request.user.userId }
|
||||
});
|
||||
if (!user) {
|
||||
throw "User not found";
|
||||
throw 'User not found';
|
||||
}
|
||||
} catch (error) {
|
||||
throw { status: 401, message: error };
|
||||
@@ -404,17 +424,15 @@ export async function getCurrentUser(
|
||||
try {
|
||||
const user = await prisma.user.findFirst({
|
||||
where: { id: request.user.userId, teams: { some: { id: teamId } } },
|
||||
include: { teams: true, permission: true },
|
||||
include: { teams: true, permission: true }
|
||||
});
|
||||
if (user) {
|
||||
const permission = user.permission.find(
|
||||
(p) => p.teamId === teamId
|
||||
).permission;
|
||||
const permission = user.permission.find((p) => p.teamId === teamId).permission;
|
||||
const payload = {
|
||||
...request.user,
|
||||
teamId,
|
||||
permission: permission || null,
|
||||
isAdmin: permission === "owner" || permission === "admin",
|
||||
isAdmin: permission === 'owner' || permission === 'admin'
|
||||
};
|
||||
token = fastify.jwt.sign(payload);
|
||||
}
|
||||
@@ -422,12 +440,14 @@ export async function getCurrentUser(
|
||||
// No new token -> not switching teams
|
||||
}
|
||||
}
|
||||
const pendingInvitations = await prisma.teamInvitation.findMany({ where: { uid: request.user.userId } })
|
||||
const pendingInvitations = await prisma.teamInvitation.findMany({
|
||||
where: { uid: request.user.userId }
|
||||
});
|
||||
return {
|
||||
settings: await prisma.setting.findUnique({ where: { id: "0" } }),
|
||||
settings: await prisma.setting.findUnique({ where: { id: '0' } }),
|
||||
sentryDSN,
|
||||
pendingInvitations,
|
||||
token,
|
||||
...request.user,
|
||||
...request.user
|
||||
};
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -22,11 +22,11 @@ export async function listSources(request: FastifyRequest) {
export async function saveSource(request, reply) {
try {
const { id } = request.params
let { name, htmlUrl, apiUrl, customPort, isSystemWide } = request.body
let { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = request.body
if (customPort) customPort = Number(customPort)
await prisma.gitSource.update({
where: { id },
data: { name, htmlUrl, apiUrl, customPort, isSystemWide }
data: { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide }
});
return reply.code(201).send()
} catch ({ status, message }) {
@@ -48,6 +48,7 @@ export async function getSource(request: FastifyRequest<OnlyId>) {
apiUrl: null,
organization: null,
customPort: 22,
customUser: 'git',
},
settings
}
@@ -133,7 +134,7 @@ export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>
try {
const { id } = request.params
const { teamId } = request.user
let { type, name, htmlUrl, apiUrl, oauthId, appId, appSecret, groupName, customPort } =
let { type, name, htmlUrl, apiUrl, oauthId, appId, appSecret, groupName, customPort, customUser } =
request.body

if (oauthId) oauthId = Number(oauthId);
@@ -142,7 +143,7 @@ export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>

if (id === 'new') {
const newId = cuid()
await prisma.gitSource.create({ data: { id: newId, type, apiUrl, htmlUrl, name, customPort, teams: { connect: { id: teamId } } } });
await prisma.gitSource.create({ data: { id: newId, type, apiUrl, htmlUrl, name, customPort, customUser, teams: { connect: { id: teamId } } } });
await prisma.gitlabApp.create({
data: {
teams: { connect: { id: teamId } },
@@ -158,7 +159,7 @@ export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>
id: newId
}
} else {
await prisma.gitSource.update({ where: { id }, data: { type, apiUrl, htmlUrl, name, customPort } });
await prisma.gitSource.update({ where: { id }, data: { type, apiUrl, htmlUrl, name, customPort, customUser } });
await prisma.gitlabApp.update({
where: { id },
data: {

@@ -21,6 +21,7 @@ export interface SaveGitLabSource extends OnlyId {
appSecret: string,
groupName: string,
customPort: number,
customUser: string,
}
}
export interface CheckGitLabOAuthId extends OnlyId {
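The new `customUser` field (defaulting to 'git') complements `customPort` for self-hosted Git servers that expect a non-default SSH user. One plausible use, sketched here purely as an assumption since the clone logic itself is not part of these hunks, is building the SSH clone URL:

// Sketch (assumption): how a custom SSH user and port could feed into a clone URL.
// The host, repository name and helper below are illustrative, not code from this diff.
function buildSshCloneUrl(host: string, repository: string, customUser = 'git', customPort = 22) {
	return `ssh://${customUser}@${host}:${customPort}/${repository}.git`;
}

console.log(buildSshCloneUrl('gitea.example.com', 'acme/website', 'gitea', 2222));
// ssh://gitea@gitea.example.com:2222/acme/website.git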
@@ -1,9 +1,32 @@
|
||||
import { FastifyRequest } from "fastify";
|
||||
import { errorHandler, getDomain, isDev, prisma, executeCommand } from "../../../lib/common";
|
||||
import { getTemplates } from "../../../lib/services";
|
||||
import { OnlyId } from "../../../types";
|
||||
import { FastifyRequest } from 'fastify';
|
||||
import { errorHandler, getDomain, isDev, prisma, executeCommand } from '../../../lib/common';
|
||||
import { getTemplates } from '../../../lib/services';
|
||||
import { OnlyId } from '../../../types';
|
||||
import { parseAndFindServiceTemplates } from '../../api/v1/services/handlers';
|
||||
|
||||
function generateServices(serviceId, containerId, port) {
|
||||
function generateServices(serviceId, containerId, port, isHttp2 = false, isHttps = false) {
|
||||
if (isHttp2) {
|
||||
return {
|
||||
[serviceId]: {
|
||||
loadbalancer: {
|
||||
servers: [
|
||||
{
|
||||
url: `${isHttps ? 'https' : 'http'}://${containerId}:${port}`
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
[`${serviceId}-http2`]: {
|
||||
loadbalancer: {
|
||||
servers: [
|
||||
{
|
||||
url: `h2c://${containerId}:${port}`
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
[serviceId]: {
|
||||
loadbalancer: {
|
||||
@@ -14,43 +37,57 @@ function generateServices(serviceId, containerId, port) {
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, isDualCerts, isCustomSSL) {
|
||||
function generateRouters(
|
||||
serviceId,
|
||||
domain,
|
||||
nakedDomain,
|
||||
pathPrefix,
|
||||
isHttps,
|
||||
isWWW,
|
||||
isDualCerts,
|
||||
isCustomSSL,
|
||||
isHttp2 = false
|
||||
) {
|
||||
let rule = `Host(\`${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`;
|
||||
let ruleWWW = `Host(\`www.${nakedDomain}\`)${
|
||||
pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''
|
||||
}`;
|
||||
let http: any = {
|
||||
entrypoints: ['web'],
|
||||
rule: `Host(\`${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`,
|
||||
rule,
|
||||
service: `${serviceId}`,
|
||||
priority: 2,
|
||||
middlewares: []
|
||||
}
|
||||
};
|
||||
let https: any = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`,
|
||||
rule,
|
||||
service: `${serviceId}`,
|
||||
priority: 2,
|
||||
tls: {
|
||||
certresolver: 'letsencrypt'
|
||||
},
|
||||
middlewares: []
|
||||
}
|
||||
};
|
||||
let httpWWW: any = {
|
||||
entrypoints: ['web'],
|
||||
rule: `Host(\`www.${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`,
|
||||
rule: ruleWWW,
|
||||
service: `${serviceId}`,
|
||||
priority: 2,
|
||||
middlewares: []
|
||||
}
|
||||
};
|
||||
let httpsWWW: any = {
|
||||
entrypoints: ['websecure'],
|
||||
rule: `Host(\`www.${nakedDomain}\`)${pathPrefix ? ` && PathPrefix(\`${pathPrefix}\`)` : ''}`,
|
||||
rule: ruleWWW,
|
||||
service: `${serviceId}`,
|
||||
priority: 2,
|
||||
tls: {
|
||||
certresolver: 'letsencrypt'
|
||||
},
|
||||
middlewares: []
|
||||
}
|
||||
};
|
||||
// 2. http + non-www only
|
||||
if (!isHttps && !isWWW) {
|
||||
https.middlewares.push('redirect-to-http');
|
||||
@@ -58,19 +95,19 @@ function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, is
|
||||
|
||||
httpWWW.middlewares.push('redirect-to-non-www');
|
||||
httpsWWW.middlewares.push('redirect-to-non-www');
|
||||
delete https.tls
|
||||
delete httpsWWW.tls
|
||||
delete https.tls;
|
||||
delete httpsWWW.tls;
|
||||
}
|
||||
|
||||
// 3. http + www only
|
||||
// 3. http + www only
|
||||
if (!isHttps && isWWW) {
|
||||
https.middlewares.push('redirect-to-http');
|
||||
httpsWWW.middlewares.push('redirect-to-http');
|
||||
|
||||
http.middlewares.push('redirect-to-www');
|
||||
https.middlewares.push('redirect-to-www');
|
||||
delete https.tls
|
||||
delete httpsWWW.tls
|
||||
delete https.tls;
|
||||
delete httpsWWW.tls;
|
||||
}
|
||||
// 5. https + non-www only
|
||||
if (isHttps && !isWWW) {
|
||||
@@ -86,17 +123,17 @@ function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, is
|
||||
httpsWWW.tls = true;
|
||||
} else {
|
||||
https.tls = true;
|
||||
delete httpsWWW.tls.certresolver
|
||||
delete httpsWWW.tls.certresolver;
|
||||
httpsWWW.tls.domains = {
|
||||
main: domain
|
||||
}
|
||||
};
|
||||
}
|
||||
} else {
|
||||
if (!isDualCerts) {
|
||||
delete httpsWWW.tls.certresolver
|
||||
delete httpsWWW.tls.certresolver;
|
||||
httpsWWW.tls.domains = {
|
||||
main: domain
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -114,26 +151,59 @@ function generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, is
|
||||
httpsWWW.tls = true;
|
||||
} else {
|
||||
httpsWWW.tls = true;
|
||||
delete https.tls.certresolver
|
||||
delete https.tls.certresolver;
|
||||
https.tls.domains = {
|
||||
main: domain
|
||||
}
|
||||
};
|
||||
}
|
||||
} else {
|
||||
if (!isDualCerts) {
|
||||
delete https.tls.certresolver
|
||||
delete https.tls.certresolver;
|
||||
https.tls.domains = {
|
||||
main: domain
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
if (isHttp2) {
|
||||
let http2 = {
|
||||
...http,
|
||||
service: `${serviceId}-http2`,
|
||||
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
|
||||
};
|
||||
let http2WWW = {
|
||||
...httpWWW,
|
||||
service: `${serviceId}-http2`,
|
||||
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
|
||||
};
|
||||
let https2 = {
|
||||
...https,
|
||||
service: `${serviceId}-http2`,
|
||||
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
|
||||
};
|
||||
|
||||
let https2WWW = {
|
||||
...httpsWWW,
|
||||
service: `${serviceId}-http2`,
|
||||
rule: `${rule} && HeadersRegexp(\`Content-Type\`, \`application/grpc*\`)`
|
||||
};
|
||||
return {
|
||||
[`${serviceId}-${pathPrefix}`]: { ...http },
|
||||
[`${serviceId}-${pathPrefix}-http2`]: { ...http2 },
|
||||
[`${serviceId}-${pathPrefix}-secure`]: { ...https },
|
||||
[`${serviceId}-${pathPrefix}-secure-http2`]: { ...https2 },
|
||||
[`${serviceId}-${pathPrefix}-www`]: { ...httpWWW },
|
||||
[`${serviceId}-${pathPrefix}-www-http2`]: { ...http2WWW },
|
||||
[`${serviceId}-${pathPrefix}-secure-www`]: { ...httpsWWW },
|
||||
[`${serviceId}-${pathPrefix}-secure-www-http2`]: { ...https2WWW }
|
||||
};
|
||||
}
|
||||
return {
|
||||
[`${serviceId}-${pathPrefix}`]: { ...http },
|
||||
[`${serviceId}-${pathPrefix}-secure`]: { ...https },
|
||||
[`${serviceId}-${pathPrefix}-www`]: { ...httpWWW },
|
||||
[`${serviceId}-${pathPrefix}-secure-www`]: { ...httpsWWW },
|
||||
}
|
||||
[`${serviceId}-${pathPrefix}-secure-www`]: { ...httpsWWW }
|
||||
};
|
||||
}
|
||||
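With `isHttp2` enabled, generateServices() emits a second `-http2` service that targets the container over h2c (or https), and generateRouters() duplicates each router with a `HeadersRegexp` rule so only gRPC traffic (Content-Type `application/grpc*`) is steered to it. A hedged sketch of the shape of the generated objects for one application; the container name, domain and port are hypothetical, only the structure mirrors the functions above:

// Sketch: approximate output for serviceId 'clfoobar123-50051' with isHttp2 = true and plain HTTP upstream.
const services = {
	'clfoobar123-50051': {
		loadbalancer: { servers: [{ url: 'http://clfoobar123:50051' }] }
	},
	'clfoobar123-50051-http2': {
		loadbalancer: { servers: [{ url: 'h2c://clfoobar123:50051' }] }
	}
};

const routers = {
	'clfoobar123-50051-/': {
		entrypoints: ['web'],
		rule: 'Host(`grpc.example.com`) && PathPrefix(`/`)',
		service: 'clfoobar123-50051',
		priority: 2,
		middlewares: []
	},
	'clfoobar123-50051-/-http2': {
		entrypoints: ['web'],
		// Same host rule, but only matched for gRPC requests.
		rule: 'Host(`grpc.example.com`) && PathPrefix(`/`) && HeadersRegexp(`Content-Type`, `application/grpc*`)',
		service: 'clfoobar123-50051-http2',
		priority: 2,
		middlewares: []
	}
};

console.log(JSON.stringify({ http: { routers, services } }, null, 2));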
export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote: boolean = false) {
|
||||
const traefik = {
|
||||
@@ -174,26 +244,26 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
const coolifySettings = await prisma.setting.findFirst();
|
||||
if (coolifySettings.isTraefikUsed && coolifySettings.proxyDefaultRedirect) {
|
||||
traefik.http.routers['catchall-http'] = {
|
||||
entrypoints: ["web"],
|
||||
rule: "HostRegexp(`{catchall:.*}`)",
|
||||
service: "noop",
|
||||
entrypoints: ['web'],
|
||||
rule: 'HostRegexp(`{catchall:.*}`)',
|
||||
service: 'noop',
|
||||
priority: 1,
|
||||
middlewares: ["redirect-regexp"]
|
||||
}
|
||||
middlewares: ['redirect-regexp']
|
||||
};
|
||||
traefik.http.routers['catchall-https'] = {
|
||||
entrypoints: ["websecure"],
|
||||
rule: "HostRegexp(`{catchall:.*}`)",
|
||||
service: "noop",
|
||||
entrypoints: ['websecure'],
|
||||
rule: 'HostRegexp(`{catchall:.*}`)',
|
||||
service: 'noop',
|
||||
priority: 1,
|
||||
middlewares: ["redirect-regexp"]
|
||||
}
|
||||
middlewares: ['redirect-regexp']
|
||||
};
|
||||
traefik.http.middlewares['redirect-regexp'] = {
|
||||
redirectregex: {
|
||||
regex: '(.*)',
|
||||
replacement: coolifySettings.proxyDefaultRedirect,
|
||||
permanent: false
|
||||
}
|
||||
}
|
||||
};
|
||||
traefik.http.services['noop'] = {
|
||||
loadBalancer: {
|
||||
servers: [
|
||||
@@ -202,25 +272,41 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
const sslpath = '/etc/traefik/acme/custom';
|
||||
|
||||
let certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } } } } })
|
||||
let certificates = await prisma.certificate.findMany({
|
||||
where: {
|
||||
team: {
|
||||
applications: { some: { settings: { isCustomSSL: true } } },
|
||||
destinationDocker: { some: { remoteEngine: false, isCoolifyProxyUsed: true } }
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (remote) {
|
||||
certificates = await prisma.certificate.findMany({ where: { team: { applications: { some: { settings: { isCustomSSL: true } } }, destinationDocker: { some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true } } } } })
|
||||
certificates = await prisma.certificate.findMany({
|
||||
where: {
|
||||
team: {
|
||||
applications: { some: { settings: { isCustomSSL: true } } },
|
||||
destinationDocker: {
|
||||
some: { id, remoteEngine: true, isCoolifyProxyUsed: true, remoteVerified: true }
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
let parsedCertificates = []
|
||||
let parsedCertificates = [];
|
||||
for (const certificate of certificates) {
|
||||
parsedCertificates.push({
|
||||
certFile: `${sslpath}/${certificate.id}-cert.pem`,
|
||||
keyFile: `${sslpath}/${certificate.id}-key.pem`
|
||||
})
|
||||
});
|
||||
}
|
||||
if (parsedCertificates.length > 0) {
|
||||
traefik.tls.certificates = parsedCertificates
|
||||
traefik.tls.certificates = parsedCertificates;
|
||||
}
|
||||
|
||||
let applications = [];
|
||||
@@ -236,7 +322,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
destinationDocker: true,
|
||||
persistentStorage: true,
|
||||
serviceSecret: true,
|
||||
serviceSetting: true,
|
||||
serviceSetting: true
|
||||
},
|
||||
orderBy: { createdAt: 'desc' }
|
||||
});
|
||||
@@ -251,23 +337,25 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
destinationDocker: true,
|
||||
persistentStorage: true,
|
||||
serviceSecret: true,
|
||||
serviceSetting: true,
|
||||
serviceSetting: true
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
orderBy: { createdAt: 'desc' }
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if (applications.length > 0) {
|
||||
const dockerIds = new Set()
|
||||
const runningContainers = {}
|
||||
const dockerIds = new Set();
|
||||
const runningContainers = {};
|
||||
applications.forEach((app) => dockerIds.add(app.destinationDocker.id));
|
||||
for (const dockerId of dockerIds) {
|
||||
const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
|
||||
const { stdout: container } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'`
|
||||
});
|
||||
if (container) {
|
||||
const containersArray = container.trim().split('\n');
|
||||
if (containersArray.length > 0) {
|
||||
runningContainers[dockerId] = containersArray
|
||||
runningContainers[dockerId] = containersArray;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -289,38 +377,54 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
if (
|
||||
!runningContainers[destinationDockerId] ||
|
||||
runningContainers[destinationDockerId].length === 0 ||
|
||||
runningContainers[destinationDockerId].filter((container) => container.startsWith(id)).length === 0
|
||||
runningContainers[destinationDockerId].filter((container) => container.startsWith(id))
|
||||
.length === 0
|
||||
) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
if (buildPack === 'compose') {
|
||||
const services = Object.entries(JSON.parse(dockerComposeConfiguration))
|
||||
const services = Object.entries(JSON.parse(dockerComposeConfiguration));
|
||||
if (services.length > 0) {
|
||||
for (const service of services) {
|
||||
const [key, value] = service
|
||||
const [key, value] = service;
|
||||
if (key && value) {
|
||||
if (!value.fqdn || !value.port) {
|
||||
continue;
|
||||
}
|
||||
const { fqdn, port } = value
|
||||
const containerId = `${id}-${key}`
|
||||
const { fqdn, port } = value;
|
||||
const containerId = `${id}-${key}`;
|
||||
const domain = getDomain(fqdn);
|
||||
const nakedDomain = domain.replace(/^www\./, '');
|
||||
const isHttps = fqdn.startsWith('https://');
|
||||
const isWWW = fqdn.includes('www.');
|
||||
const pathPrefix = '/'
|
||||
const pathPrefix = '/';
|
||||
const isCustomSSL = false;
|
||||
const dualCerts = false;
|
||||
const serviceId = `${id}-${port || 'default'}`
|
||||
const serviceId = `${id}-${port || 'default'}`;
|
||||
|
||||
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
|
||||
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, containerId, port) }
|
||||
traefik.http.routers = {
|
||||
...traefik.http.routers,
|
||||
...generateRouters(
|
||||
serviceId,
|
||||
domain,
|
||||
nakedDomain,
|
||||
pathPrefix,
|
||||
isHttps,
|
||||
isWWW,
|
||||
dualCerts,
|
||||
isCustomSSL
|
||||
)
|
||||
};
|
||||
traefik.http.services = {
|
||||
...traefik.http.services,
|
||||
...generateServices(serviceId, containerId, port)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
const { previews, dualCerts, isCustomSSL } = settings;
|
||||
const { previews, dualCerts, isCustomSSL, isHttp2 } = settings;
|
||||
const { network, id: dockerId } = destinationDocker;
|
||||
if (!fqdn) {
|
||||
continue;
|
||||
@@ -329,12 +433,31 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
const nakedDomain = domain.replace(/^www\./, '');
|
||||
const isHttps = fqdn.startsWith('https://');
|
||||
const isWWW = fqdn.includes('www.');
|
||||
const pathPrefix = '/'
|
||||
const serviceId = `${id}-${port || 'default'}`
|
||||
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
|
||||
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
|
||||
const pathPrefix = '/';
|
||||
const serviceId = `${id}-${port || 'default'}`;
|
||||
traefik.http.routers = {
|
||||
...traefik.http.routers,
|
||||
...generateRouters(
|
||||
serviceId,
|
||||
domain,
|
||||
nakedDomain,
|
||||
pathPrefix,
|
||||
isHttps,
|
||||
isWWW,
|
||||
dualCerts,
|
||||
isCustomSSL,
|
||||
isHttp2
|
||||
)
|
||||
};
|
||||
traefik.http.services = {
|
||||
...traefik.http.services,
|
||||
...generateServices(serviceId, id, port, isHttp2, isHttps)
|
||||
};
|
||||
if (previews) {
|
||||
const { stdout } = await executeCommand({ dockerId, command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"` })
|
||||
const { stdout } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker container ls --filter="status=running" --filter="network=${network}" --filter="name=${id}-" --format="{{json .Names}}"`
|
||||
});
|
||||
if (stdout) {
|
||||
const containers = stdout
|
||||
.trim()
|
||||
@@ -343,44 +466,57 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
.map((c) => c.replace(/"/g, ''));
|
||||
if (containers.length > 0) {
|
||||
for (const container of containers) {
|
||||
const previewDomain = `${container.split('-')[1]}${coolifySettings.previewSeparator}${domain}`;
|
||||
const previewDomain = `${container.split('-')[1]}${
|
||||
coolifySettings.previewSeparator
|
||||
}${domain}`;
|
||||
const nakedDomain = previewDomain.replace(/^www\./, '');
|
||||
const pathPrefix = '/'
|
||||
const serviceId = `${container}-${port || 'default'}`
|
||||
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, previewDomain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
|
||||
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, container, port) }
|
||||
const pathPrefix = '/';
|
||||
const serviceId = `${container}-${port || 'default'}`;
|
||||
traefik.http.routers = {
|
||||
...traefik.http.routers,
|
||||
...generateRouters(
|
||||
serviceId,
|
||||
previewDomain,
|
||||
nakedDomain,
|
||||
pathPrefix,
|
||||
isHttps,
|
||||
isWWW,
|
||||
dualCerts,
|
||||
isCustomSSL
|
||||
)
|
||||
};
|
||||
traefik.http.services = {
|
||||
...traefik.http.services,
|
||||
...generateServices(serviceId, container, port, isHttp2)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (services.length > 0) {
|
||||
const dockerIds = new Set()
|
||||
const runningContainers = {}
|
||||
const dockerIds = new Set();
|
||||
const runningContainers = {};
|
||||
services.forEach((app) => dockerIds.add(app.destinationDocker.id));
|
||||
for (const dockerId of dockerIds) {
|
||||
const { stdout: container } = await executeCommand({ dockerId, command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'` })
|
||||
const { stdout: container } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker container ls --filter 'label=coolify.managed=true' --format '{{ .Names}}'`
|
||||
});
|
||||
if (container) {
|
||||
const containersArray = container.trim().split('\n');
|
||||
if (containersArray.length > 0) {
|
||||
runningContainers[dockerId] = containersArray
|
||||
runningContainers[dockerId] = containersArray;
|
||||
}
|
||||
}
|
||||
}
|
||||
for (const service of services) {
|
||||
try {
|
||||
let {
|
||||
fqdn,
|
||||
id,
|
||||
type,
|
||||
destinationDockerId,
|
||||
dualCerts,
|
||||
serviceSetting
|
||||
} = service;
|
||||
let { fqdn, id, type, destinationDockerId, dualCerts, serviceSetting } = service;
|
||||
if (!fqdn) {
|
||||
continue;
|
||||
}
|
||||
@@ -392,7 +528,7 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
runningContainers[destinationDockerId].length === 0 ||
|
||||
!runningContainers[destinationDockerId].includes(id)
|
||||
) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
const templates = await getTemplates();
|
||||
let found = templates.find((a) => a.type === type);
|
||||
@@ -401,88 +537,144 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
|
||||
}
|
||||
found = JSON.parse(JSON.stringify(found).replaceAll('$$id', id));
|
||||
for (const oneService of Object.keys(found.services)) {
|
||||
const isDomainConfiguration = found?.services[oneService]?.proxy?.filter(p => p.domain) ?? [];
|
||||
if (isDomainConfiguration.length > 0) {
|
||||
const { proxy } = found.services[oneService];
|
||||
const isDomainAndProxyConfiguration =
|
||||
found?.services[oneService]?.proxy?.filter((p) => p.port) ?? [];
|
||||
if (isDomainAndProxyConfiguration.length > 0) {
|
||||
const template: any = await parseAndFindServiceTemplates(service, null, true);
|
||||
const { proxy } = template.services[oneService] || found.services[oneService];
|
||||
for (let configuration of proxy) {
|
||||
if (configuration.domain) {
|
||||
const setting = serviceSetting.find((a) => a.variableName === configuration.domain);
|
||||
const setting = serviceSetting.find(
|
||||
(a) => a.variableName === configuration.domain
|
||||
);
|
||||
if (setting) {
|
||||
configuration.domain = configuration.domain.replace(configuration.domain, setting.value);
|
||||
configuration.domain = configuration.domain.replace(
|
||||
configuration.domain,
|
||||
setting.value
|
||||
);
|
||||
}
|
||||
}
|
||||
const foundPortVariable = serviceSetting.find((a) => a.name.toLowerCase() === 'port')
|
||||
const foundPortVariable = serviceSetting.find(
|
||||
(a) => a.name.toLowerCase() === 'port'
|
||||
);
|
||||
if (foundPortVariable) {
|
||||
configuration.port = foundPortVariable.value
|
||||
configuration.port = foundPortVariable.value;
|
||||
}
|
||||
let port, pathPrefix, customDomain;
|
||||
if (configuration) {
|
||||
port = configuration?.port;
|
||||
pathPrefix = configuration?.pathPrefix || '/';
|
||||
customDomain = configuration?.domain
|
||||
customDomain = configuration?.domain;
|
||||
}
|
||||
if (customDomain) {
|
||||
fqdn = customDomain
|
||||
fqdn = customDomain;
|
||||
} else {
|
||||
fqdn = service.fqdn
|
||||
fqdn = service.fqdn;
|
||||
}
|
||||
const domain = getDomain(fqdn);
|
||||
const nakedDomain = domain.replace(/^www\./, '');
|
||||
const isHttps = fqdn.startsWith('https://');
|
||||
const isWWW = fqdn.includes('www.');
|
||||
const isCustomSSL = false;
|
||||
const serviceId = `${oneService}-${port || 'default'}`
|
||||
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
|
||||
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, oneService, port) }
|
||||
const serviceId = `${oneService}-${port || 'default'}`;
|
||||
traefik.http.routers = {
|
||||
...traefik.http.routers,
|
||||
...generateRouters(
|
||||
serviceId,
|
||||
domain,
|
||||
nakedDomain,
|
||||
pathPrefix,
|
||||
isHttps,
|
||||
isWWW,
|
||||
dualCerts,
|
||||
isCustomSSL
|
||||
)
|
||||
};
|
||||
traefik.http.services = {
|
||||
...traefik.http.services,
|
||||
...generateServices(serviceId, oneService, port)
|
||||
};
|
||||
}
|
||||
} else {
|
||||
if (found.services[oneService].ports && found.services[oneService].ports.length > 0) {
|
||||
for (let [index, port] of found.services[oneService].ports.entries()) {
|
||||
if (port == 22) continue;
|
||||
if (index === 0) {
|
||||
const foundPortVariable = serviceSetting.find((a) => a.name.toLowerCase() === 'port')
|
||||
const foundPortVariable = serviceSetting.find(
|
||||
(a) => a.name.toLowerCase() === 'port'
|
||||
);
|
||||
if (foundPortVariable) {
|
||||
port = foundPortVariable.value
|
||||
port = foundPortVariable.value;
|
||||
}
|
||||
}
|
||||
const domain = getDomain(fqdn);
|
||||
const nakedDomain = domain.replace(/^www\./, '');
|
||||
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const pathPrefix = '/'
const isCustomSSL = false
const serviceId = `${oneService}-${port || 'default'}`
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, id, port) }
const pathPrefix = '/';
const isCustomSSL = false;
const serviceId = `${oneService}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, id, port)
};
}
}
}
}
} catch (error) {
console.log(error)
console.log(error);
}
}
}
if (!remote) {
const { fqdn, dualCerts } = await prisma.setting.findFirst();
if (!fqdn) {
return
return;
}
const domain = getDomain(fqdn);
const nakedDomain = domain.replace(/^www\./, '');
const isHttps = fqdn.startsWith('https://');
const isWWW = fqdn.includes('www.');
const id = isDev ? 'host.docker.internal' : 'coolify'
const container = isDev ? 'host.docker.internal' : 'coolify'
const port = 3000
const pathPrefix = '/'
const isCustomSSL = false
const serviceId = `${id}-${port || 'default'}`
traefik.http.routers = { ...traefik.http.routers, ...generateRouters(serviceId, domain, nakedDomain, pathPrefix, isHttps, isWWW, dualCerts, isCustomSSL) }
traefik.http.services = { ...traefik.http.services, ...generateServices(serviceId, container, port) }
const id = isDev ? 'host.docker.internal' : 'coolify';
const container = isDev ? 'host.docker.internal' : 'coolify';
const port = 3000;
const pathPrefix = '/';
const isCustomSSL = false;
const serviceId = `${id}-${port || 'default'}`;
traefik.http.routers = {
...traefik.http.routers,
...generateRouters(
serviceId,
domain,
nakedDomain,
pathPrefix,
isHttps,
isWWW,
dualCerts,
isCustomSSL
)
};
traefik.http.services = {
...traefik.http.services,
...generateServices(serviceId, container, port)
};
}
} catch (error) {
console.log(error)
console.log(error);
} finally {
if (Object.keys(traefik.http.routers).length === 0) {
traefik.http.routers = null;
@@ -496,9 +688,9 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote

export async function otherProxyConfiguration(request: FastifyRequest<TraefikOtherConfiguration>) {
try {
const { id } = request.query
const { id } = request.query;
if (id) {
const { privatePort, publicPort, type, address = id } = request.query
const { privatePort, publicPort, type, address = id } = request.query;
let traefik = {};
if (publicPort && type && privatePort) {
if (type === 'tcp') {
@@ -559,18 +751,18 @@ export async function otherProxyConfiguration(request: FastifyRequest<TraefikOth
}
}
} else {
throw { status: 500 }
throw { status: 500 };
}
}
} else {
throw { status: 500 }
throw { status: 500 };
}
return {
...traefik
};
}
throw { status: 500 }
throw { status: 500 };
} catch ({ status, message }) {
return errorHandler({ status, message })
return errorHandler({ status, message });
}
}

@@ -4,9 +4,11 @@ import { proxyConfiguration, otherProxyConfiguration } from './handlers';
import { OtherProxyConfiguration } from './types';

const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/main.json', async (request, reply) => proxyConfiguration(request, false));
fastify.get<OnlyId>('/remote/:id', async (request) => proxyConfiguration(request, true));
fastify.get<OtherProxyConfiguration>('/other.json', async (request, reply) => otherProxyConfiguration(request));
fastify.get<OnlyId>('/main.json', async (request, reply) => proxyConfiguration(request, false));
fastify.get<OnlyId>('/remote/:id', async (request) => proxyConfiguration(request, true));
fastify.get<OtherProxyConfiguration>('/other.json', async (request, reply) =>
otherProxyConfiguration(request)
);
};

export default root;
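
For orientation, a minimal sketch of how a route plugin like the one above could be mounted on a Fastify server; the import path, prefix, and port below are assumptions, not taken from this diff:

import Fastify from 'fastify';
import traefikRoutes from './routes/webhooks/traefik'; // hypothetical path to the plugin above

const fastify = Fastify();
// with this assumed prefix the routes resolve to /webhooks/traefik/main.json,
// /webhooks/traefik/remote/:id and /webhooks/traefik/other.json
fastify.register(traefikRoutes, { prefix: '/webhooks/traefik' });

fastify.listen({ port: 3001 }, (err) => {
  if (err) throw err;
});
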
@@ -1,9 +0,0 @@
import { parentPort } from 'node:worker_threads';
import process from 'node:process';

console.log('Hello TypeScript!');

// signal to parent that the job is done
if (parentPort) parentPort.postMessage('done');
// eslint-disable-next-line unicorn/no-process-exit
else process.exit(0);
@@ -1,26 +0,0 @@
import Bree from 'bree';
import path from 'path';
import Cabin from 'cabin';
import TSBree from '@breejs/ts-worker';

export const isDev = process.env['NODE_ENV'] === 'development';

Bree.extend(TSBree);

const options: any = {
defaultExtension: 'js',
logger: new Cabin(),
// logger: false,
// workerMessageHandler: async ({ name, message }) => {
// if (name === 'deployApplication' && message?.deploying) {
// if (scheduler.workers.has('autoUpdater') || scheduler.workers.has('cleanupStorage')) {
// scheduler.workers.get('deployApplication').postMessage('cancel')
// }
// }
// },
// jobs: [{ name: 'deployApplication' }]
jobs: [{ name: 'worker' }]
};
if (isDev) options.root = path.join(__dirname, '../jobs');

export const scheduler = new Bree(options);
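
A short usage sketch for the scheduler defined above; the import path is an assumption, and start() simply launches every job listed in options.jobs:

import { scheduler } from './scheduler'; // hypothetical path to the module above

async function bootstrap() {
  // starts the single 'worker' job configured above
  await scheduler.start();
}

bootstrap();
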
@@ -1,84 +0,0 @@
import { z } from 'zod';
import { privateProcedure, router } from '../trpc';
import { decrypt } from '../../lib/common';
import { prisma } from '../../prisma';
import { executeCommand } from '../../lib/executeCommand';
import { stopDatabaseContainer, stopTcpHttpProxy } from '../../lib/docker';

export const databasesRouter = router({
status: privateProcedure.input(z.object({ id: z.string() })).query(async ({ ctx, input }) => {
const id = input.id;
const teamId = ctx.user?.teamId;

let isRunning = false;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (database) {
const { destinationDockerId, destinationDocker } = database;
if (destinationDockerId) {
try {
const { stdout } = await executeCommand({
dockerId: destinationDocker.id,
command: `docker inspect --format '{{json .State}}' ${id}`
});

if (JSON.parse(stdout).Running) {
isRunning = true;
}
} catch (error) {
//
}
}
}
return {
isRunning
};
}),
cleanup: privateProcedure.query(async ({ ctx }) => {
const teamId = ctx.user?.teamId;
let databases = await prisma.database.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { settings: true, destinationDocker: true, teams: true }
});
for (const database of databases) {
if (!database?.version) {
const { id } = database;
if (database.destinationDockerId) {
const everStarted = await stopDatabaseContainer(database);
if (everStarted)
await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
}
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
}
}
return {};
}),
delete: privateProcedure
.input(z.object({ id: z.string(), force: z.boolean() }))
.mutation(async ({ ctx, input }) => {
const { id, force } = input;
const teamId = ctx.user?.teamId;
const database = await prisma.database.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, settings: true }
});
if (!force) {
if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword);
if (database.rootUserPassword)
database.rootUserPassword = decrypt(database.rootUserPassword);
if (database.destinationDockerId) {
const everStarted = await stopDatabaseContainer(database);
if (everStarted)
await stopTcpHttpProxy(id, database.destinationDocker, database.publicPort);
}
}
await prisma.databaseSettings.deleteMany({ where: { databaseId: id } });
await prisma.databaseSecret.deleteMany({ where: { databaseId: id } });
await prisma.database.delete({ where: { id } });
return {};
})
});
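
As a rough illustration, a tRPC v10 client could call the status procedure above along these lines; the router key (databases), endpoint URL, and import paths are assumptions:

import { createTRPCProxyClient, httpBatchLink } from '@trpc/client';
import type { AppRouter } from '../trpc'; // hypothetical type of the merged app router

const client = createTRPCProxyClient<AppRouter>({
  links: [httpBatchLink({ url: 'http://localhost:3000/trpc' })] // URL is an assumption
});

// resolves to { isRunning: boolean }, as returned by the status procedure
client.databases.status.query({ id: 'some-database-id' }).then(({ isRunning }) => {
  console.log('database running:', isRunning);
});
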
@@ -1,171 +0,0 @@
import { z } from 'zod';
import { privateProcedure, router } from '../trpc';
import { decrypt, getTemplates, removeService } from '../../lib/common';
import { prisma } from '../../prisma';
import { executeCommand } from '../../lib/executeCommand';

export const servicesRouter = router({
status: privateProcedure.input(z.object({ id: z.string() })).query(async ({ ctx, input }) => {
const id = input.id;
const teamId = ctx.user?.teamId;
if (!teamId) {
throw { status: 400, message: 'Team not found.' };
}
const service = await getServiceFromDB({ id, teamId });
const { destinationDockerId } = service;
let payload = {};
if (destinationDockerId) {
const { stdout: containers } = await executeCommand({
dockerId: service.destinationDocker.id,
command: `docker ps -a --filter "label=com.docker.compose.project=${id}" --format '{{json .}}'`
});
if (containers) {
const containersArray = containers.trim().split('\n');
if (containersArray.length > 0 && containersArray[0] !== '') {
const templates = await getTemplates();
let template = templates.find((t: { type: string }) => t.type === service.type);
const templateStr = JSON.stringify(template);
if (templateStr) {
template = JSON.parse(templateStr.replaceAll('$$id', service.id));
}
for (const container of containersArray) {
let isRunning = false;
let isExited = false;
let isRestarting = false;
let isExcluded = false;
const containerObj = JSON.parse(container);
const exclude = template?.services[containerObj.Names]?.exclude;
if (exclude) {
payload[containerObj.Names] = {
status: {
isExcluded: true,
isRunning: false,
isExited: false,
isRestarting: false
}
};
continue;
}

const status = containerObj.State;
if (status === 'running') {
isRunning = true;
}
if (status === 'exited') {
isExited = true;
}
if (status === 'restarting') {
isRestarting = true;
}
payload[containerObj.Names] = {
status: {
isExcluded,
isRunning,
isExited,
isRestarting
}
};
}
}
}
}
return payload;
}),
cleanup: privateProcedure.query(async ({ ctx }) => {
const teamId = ctx.user?.teamId;
let services = await prisma.service.findMany({
where: { teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { destinationDocker: true, teams: true }
});
for (const service of services) {
if (!service.fqdn) {
if (service.destinationDockerId) {
const { stdout: containers } = await executeCommand({
dockerId: service.destinationDockerId,
command: `docker ps -a --filter 'label=com.docker.compose.project=${service.id}' --format {{.ID}}`
});
if (containers) {
const containerArray = containers.split('\n');
if (containerArray.length > 0) {
for (const container of containerArray) {
await executeCommand({
dockerId: service.destinationDockerId,
command: `docker stop -t 0 ${container}`
});
await executeCommand({
dockerId: service.destinationDockerId,
command: `docker rm --force ${container}`
});
}
}
}
}
await removeService({ id: service.id });
}
}
}),
delete: privateProcedure
.input(z.object({ force: z.boolean(), id: z.string() }))
.mutation(async ({ input }) => {
// todo: check if user is allowed to delete service
const { id } = input;
await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
await prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
await prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
await prisma.fider.deleteMany({ where: { serviceId: id } });
await prisma.ghost.deleteMany({ where: { serviceId: id } });
await prisma.umami.deleteMany({ where: { serviceId: id } });
await prisma.hasura.deleteMany({ where: { serviceId: id } });
await prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
await prisma.minio.deleteMany({ where: { serviceId: id } });
await prisma.vscodeserver.deleteMany({ where: { serviceId: id } });
await prisma.wordpress.deleteMany({ where: { serviceId: id } });
await prisma.glitchTip.deleteMany({ where: { serviceId: id } });
await prisma.moodle.deleteMany({ where: { serviceId: id } });
await prisma.appwrite.deleteMany({ where: { serviceId: id } });
await prisma.searxng.deleteMany({ where: { serviceId: id } });
await prisma.weblate.deleteMany({ where: { serviceId: id } });
await prisma.taiga.deleteMany({ where: { serviceId: id } });

await prisma.service.delete({ where: { id } });
return {};
})
});

export async function getServiceFromDB({
id,
teamId
}: {
id: string;
teamId: string;
}): Promise<any> {
const settings = await prisma.setting.findFirst();
const body = await prisma.service.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: {
destinationDocker: true,
persistentStorage: true,
serviceSecret: true,
serviceSetting: true,
wordpress: true,
plausibleAnalytics: true
}
});
if (!body) {
return null;
}
// body.type = fixType(body.type);

if (body?.serviceSecret.length > 0) {
body.serviceSecret = body.serviceSecret.map((s) => {
s.value = decrypt(s.value);
return s;
});
}
if (body.wordpress) {
body.wordpress.ftpPassword = decrypt(body.wordpress.ftpPassword);
}

return { ...body, settings };
}
@@ -3,6 +3,15 @@ import { addToast } from './store';
import Cookies from 'js-cookie';
export const asyncSleep = (delay: number) => new Promise((resolve) => setTimeout(resolve, delay));

export function dashify(str: string, options?: any): string {
if (typeof str !== 'string') return str;
return str
.trim()
.replace(/\W/g, (m) => (/[À-ž]/.test(m) ? m : '-'))
.replace(/^-+|-+$/g, '')
.replace(/-{2,}/g, (m) => (options && options.condense ? '-' : m))
.toLowerCase();
}
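
A quick usage sketch of dashify as defined above (outputs traced from the implementation; assumes the function is in scope):

// non-word characters become dashes, edges are trimmed, the result is lowercased
dashify(' Hello World! ');               // 'hello-world'
// consecutive dashes are kept unless the condense option is set
dashify('foo  bar');                     // 'foo--bar'
dashify('foo  bar', { condense: true }); // 'foo-bar'
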
export function errorNotification(error: any | { message: string }): void {
if (error instanceof Error) {
console.error(error.message)
@@ -0,0 +1,44 @@
<script lang="ts">
import ExternalLink from './ExternalLink.svelte';
import Tooltip from './Tooltip.svelte';
export let url = 'https://docs.coollabs.io';
export let text: any = '';
export let isExternal = false;
let id =
'cool-' +
url
.split('')
.map((c) => c.charCodeAt(0).toString(16).padStart(2, '0'))
.join('')
.slice(-16);
</script>

<a
{id}
href={url}
target="_blank noreferrer"
class="flex no-underline inline-block cursor-pointer"
class:icons={!text}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-6 h-6"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M9.879 7.519c1.171-1.025 3.071-1.025 4.242 0 1.172 1.025 1.172 2.687 0 3.712-.203.179-.43.326-.67.442-.745.361-1.45.999-1.45 1.827v.75M21 12a9 9 0 11-18 0 9 9 0 0118 0zm-9 5.25h.008v.008H12v-.008z"
/>
</svg>
{text}
{#if isExternal}
<ExternalLink />
{/if}
</a>
{#if !text}
<Tooltip triggeredBy={`#${id}`}>See details in the documentation</Tooltip>
{/if}
@@ -0,0 +1,10 @@
<svg
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
viewBox="0 0 24 24"
stroke-width="3"
stroke="currentColor"
class="w-3 h-3 text-white"
>
<path stroke-linecap="round" stroke-linejoin="round" d="M4.5 19.5l15-15m0 0H8.25m11.25 0v11.25" />
</svg>
@@ -0,0 +1,6 @@
<script lang="ts">
export let text: string;
export let customClass = 'max-w-[24rem]';
</script>

<div class="p-2 text-xs text-stone-400 {customClass}">{@html text}</div>