Mirror of https://github.com/ershisan99/coolify.git, synced 2026-01-07 12:34:18 +00:00
Compare commits
89 Commits
SHA1:
33b853b981, e6063fb93a, f30f23af59, d4798a3b22, f3beb5d8db, e86b916415, e14cc6f2f0, 8c1eb94401, 29fa421945, 7cfe98d988,
e2314c350b, 3713b33578, e007a773fd, e2821118eb, 4c8e73ac86, cb980fb814, 41c84e3642, 2bad98424f, bc6b1e2dea, 911c15d1be,
f79d570870, 7fffa9fba5, cbd634fb99, 7ae7436d4f, 641bada100, 3416d8d88e, 0bb503368b, ac3a77c3c7, 79b4178d76, 42a61296d7,
e8088e2a70, c4d39aced2, b40a5adeb0, 558a900620, 6b5e5a504d, e44dca2464, e1f84b277a, 2518f46b08, 01e18a9496, 564ca709d3,
a54a36ae18, 43603b0961, 96cd99f904, 3438d10e25, 022ccb42a1, e6d72e9f87, 06e8a6af23, ac188d137a, cae466745a, d61f16dab0,
02ba277a86, 470ff49a02, 04d741581d, 038f210148, 2adad3a7bd, 05fb26a49b, 1c237affb4, 3e81d7e9cb, edb66620c1, 04f7e8e777,
eee201013c, 1190cb4ea1, 507100ea0b, 9b13912b6d, ee65deebfd, ba9fa442d1, 87da27f9bf, b5bc5fe2c6, d2329360d0, 7ece0ae10a,
f931b47eb8, 7f7eb12ded, c0940f7a19, 9dfde11e35, 6f15cc2dbc, 120308638f, 1d04ef99bb, 9b00d177ef, 884524c448, 3ae1e7e87d,
81f885311d, d9362f09d8, 906d181d1b, 44b8812a7b, 3308c45e88, e530ecf9f9, 51b5edb04f, f0d89f850e, b777e08542
.github/ISSUE_TEMPLATE/--bug-report.yaml (vendored, 3 changes)
```
@@ -2,9 +2,6 @@ name: 🐞 Bug report
description: Create a bug report to help us improve coolify
title: "[Bug]: "
labels: [Bug]
assignees:
- andrasbacsai
- vasani-arpit
body:
- type: markdown
attributes:
```
```
@@ -2,9 +2,6 @@ name: 🛠️ Feature request
description: Suggest an idea to improve coolify
title: '[Feature]: '
labels: [Enhancement]
assignees:
- andrasbacsai
- vasani-arpit
body:
- type: markdown
attributes:
```
```
@@ -1,12 +1,9 @@
name: fluent-bit-release
name: Production Release to DockerHub

on:
push:
paths:
- "others/fluentbit"
- ".github/workflows/fluent-bit-release.yml"
branches:
- next
branches:
- "this-branch-does-not-exists"

jobs:
arm64:
@@ -23,13 +20,18 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/fluentbit/
context: .
platforms: linux/arm64
push: true
tags: coollabsio/coolify-fluent-bit:1.0.0-arm64
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
amd64:
runs-on: ubuntu-latest
steps:
@@ -44,13 +46,18 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v3
with:
context: others/fluentbit/
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify-fluent-bit:1.0.0-amd64
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}
cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
aarch64:
runs-on: [self-hosted, arm64]
steps:
@@ -65,13 +72,18 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/fluentbit/
context: .
platforms: linux/aarch64
push: true
tags: coollabsio/coolify-fluent-bit:1.0.0-aarch64
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64]
@@ -87,7 +99,14 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Create & publish manifest
run: |
docker manifest create coollabsio/coolify-fluent-bit:1.0.0 --amend coollabsio/coolify-fluent-bit:1.0.0-amd64 --amend coollabsio/coolify-fluent-bit:1.0.0-arm64 --amend coollabsio/coolify-fluent-bit:1.0.0-aarch64
docker manifest push coollabsio/coolify-fluent-bit:1.0.0
docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}}
docker buildx imagetools create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --tag coollabsio/coolify:latest
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_PROD_RELEASE_CHANNEL }}
```
.github/workflows/production-release.yml (vendored, 103 changes)
```
@@ -1,36 +1,14 @@
name: production-release
name: Production Release to ghcr.io

on:
release:
types: [released]

env:
REGISTRY: ghcr.io
IMAGE_NAME: "coollabsio/coolify"

jobs:
arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/arm64
push: true
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
amd64:
runs-on: ubuntu-latest
steps:
@@ -40,23 +18,27 @@ jobs:
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}
cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
aarch64:
runs-on: [self-hosted, arm64]
steps:
@@ -66,26 +48,30 @@ jobs:
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}-aarch64
- name: Build and push
uses: docker/build-push-action@v2
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/aarch64
push: true
tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max
tags: ${{ steps.meta.outputs.tags }}-aarch64
labels: ${{ steps.meta.outputs.labels }}
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64]
needs: [amd64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -93,18 +79,23 @@ jobs:
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
- name: Create & publish manifest
run: |
docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}}
docker buildx imagetools create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --tag coollabsio/coolify:latest
docker buildx imagetools create --append ${{ fromJSON(steps.meta.outputs.json).tags[0] }}-aarch64 --tag ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
docker buildx imagetools create --append ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-aarch64 --tag ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
- uses: sarisia/actions-status-discord@v1
if: always()
with:
```
.github/workflows/release-candidate.yml (vendored, new file, 110 lines)
```
@@ -0,0 +1,110 @@
name: Release Candidate to ghcr.io

on:
release:
types: [prereleased]

env:
REGISTRY: ghcr.io
IMAGE_NAME: "coollabsio/coolify"

jobs:
amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
aarch64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/aarch64
push: true
tags: ${{ steps.meta.outputs.tags }}-aarch64
labels: ${{ steps.meta.outputs.labels }}
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
- name: Create & publish manifest
run: |
docker buildx imagetools create --append ${{ steps.meta.outputs.tags }}-aarch64 --tag ${{ steps.meta.outputs.tags }}
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
```
```
@@ -1,19 +1,18 @@
name: pocketbase-release
name: Staging Release to DockerHub

on:
push:
paths:
- "others/pocketbase/*"
- ".github/workflows/pocketbase-release.yml"
branches:
- next
- main
- "this-branch-does-not-exists"

jobs:
arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
@@ -23,18 +22,25 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/pocketbase/
context: .
platforms: linux/arm64
push: true
tags: coollabsio/pocketbase:0.12.3-arm64
tags: coollabsio/coolify:next-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
amd64:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
@@ -44,37 +50,21 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v3
with:
context: others/pocketbase/
context: .
platforms: linux/amd64
push: true
tags: coollabsio/pocketbase:0.12.3-amd64
aarch64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v2
with:
context: others/pocketbase/
platforms: linux/aarch64
push: true
tags: coollabsio/pocketbase:0.12.3-aarch64
tags: coollabsio/coolify:next
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
merge-manifest:
runs-on: ubuntu-latest
needs: [amd64, arm64, aarch64]
needs: [arm64, amd64]
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -89,5 +79,8 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Create & publish manifest
run: |
docker manifest create coollabsio/pocketbase:0.12.3 --amend coollabsio/pocketbase:0.12.3-amd64 --amend coollabsio/pocketbase:0.12.3-arm64 --amend coollabsio/pocketbase:0.12.3-aarch64
docker manifest push coollabsio/pocketbase:0.12.3
docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
- uses: sarisia/actions-status-discord@v1
if: always()
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
```
.github/workflows/staging-release.yml (vendored, 117 changes)
```
@@ -1,76 +1,77 @@
name: staging-release

name: Staging Release to ghcr.io
concurrency:
group: staging_environment
cancel-in-progress: true
on:
push:
paths:
- "**"
- "!others/fluentbit"
- "!others/pocketbase"
- "!.github/workflows/fluent-bit-release.yml"
- "!.github/workflows/pocketbase-release.yml"
branches:
- next
branches-ignore:
- "main"
- "v4"
env:
REGISTRY: ghcr.io
IMAGE_NAME: "coollabsio/coolify"

jobs:
arm64:
runs-on: [self-hosted, arm64]
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/arm64
push: true
tags: coollabsio/coolify:next-arm64
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
amd64:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get current package version
uses: martinbeentjes/npm-get-version-action@v1.2.3
id: package-version
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/amd64
push: true
tags: coollabsio/coolify:next
cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
aarch64:
runs-on:
group: aarch-runners
steps:
- name: Checkout
uses: actions/checkout@v3
with:
ref: "next"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
platforms: linux/aarch64
push: true
tags: ${{ steps.meta.outputs.tags }}-aarch64
labels: ${{ steps.meta.outputs.labels }}
merge-manifest:
runs-on: ubuntu-latest
needs: [arm64, amd64]
needs: [amd64, aarch64]
steps:
- name: Checkout
uses: actions/checkout@v3
@@ -78,14 +79,20 @@ jobs:
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
- name: Login to ghcr.io
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Create & publish manifest
run: |
docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
docker buildx imagetools create --append ${{ steps.meta.outputs.tags }}-aarch64 --tag ${{ steps.meta.outputs.tags }}
- uses: sarisia/actions-status-discord@v1
if: always()
with:
```
```
@@ -22,7 +22,7 @@ ARG DOCKER_VERSION=20.10.18
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=v0.27.0
ARG PACK_VERSION=0.27.0

RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
@@ -38,7 +38,7 @@ RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack

COPY --from=build /app/apps/api/build/ .
COPY --from=build /app/others/fluentbit/ ./fluentbit
# COPY --from=build /app/others/fluentbit/ ./fluentbit
COPY --from=build /app/apps/ui/build/ ./public
COPY --from=build /app/apps/api/prisma/ ./prisma
COPY --from=build /app/apps/api/package.json .
@@ -50,4 +50,4 @@ RUN pnpm install -p

EXPOSE 3000
ENV CHECKPOINT_DISABLE=1
CMD pnpm start
CMD pnpm start
```
```
@@ -9,7 +9,7 @@ ARG DOCKER_VERSION=20.10.18
# Reverted to 2.6.1 because of this https://github.com/docker/compose/issues/9704. 2.9.0 still has a bug.
ARG DOCKER_COMPOSE_VERSION=2.6.1
# https://github.com/buildpacks/pack/releases
ARG PACK_VERSION=v0.27.0
ARG PACK_VERSION=0.27.0
WORKDIR /app

RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
@@ -28,4 +28,4 @@ RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /
RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack

EXPOSE 3000
ENV CHECKPOINT_DISABLE=1
ENV CHECKPOINT_DISABLE=1
```
```
@@ -100,7 +100,7 @@ Deploy your resource to:

- Mastodon: [@andrasbacsai@fosstodon.org](https://fosstodon.org/@andrasbacsai)
- Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
- Twitter: [@andrasbacsai](https://twitter.com/heyandras)
- Email: [andras@coollabs.io](mailto:andras@coollabs.io)
- Discord: [Invitation](https://coollabs.io/discord)

@@ -153,3 +153,6 @@ Support this project with your organization. Your logo will show up here with a

<a href="https://opencollective.com/coollabsio"><img src="https://opencollective.com/coollabsio/individuals.svg?width=890"></a>

## Star History

[](https://star-history.com/#coollabsio/coolify&Date)
```
```
@@ -230,7 +230,7 @@
description: "Open Source realtime backend in 1 file"
services:
$$id:
image: coollabsio/pocketbase:$$core_version
image: ghcr.io/coollabsio/pocketbase:$$core_version
volumes:
- $$id-data:/app/pb_data
ports:
@@ -268,6 +268,13 @@
- DISABLE_REGISTRATION=$$config_disable_registration
- DATABASE_URL=$$secret_database_url
- CLICKHOUSE_DATABASE_URL=$$secret_clickhouse_database_url
- MAILER_EMAIL=$$config_mailer_email
- SMTP_HOST_ADDR=$$config_smtp_host_addr
- SMTP_HOST_PORT=$$config_smtp_host_port
- SMTP_USER_NAME=$$config_smtp_user_name
- SMTP_USER_PWD=$$config_smtp_user_pwd
- SMTP_HOST_SSL_ENABLED=$$config_smtp_host_ssl_enabled
- SMTP_HOST_RETRIES=$$config_smtp_host_retries
ports:
- "8000"
$$id-postgresql:
@@ -375,6 +382,49 @@
label: PostgreSQL Database
defaultValue: plausible
description: ""
- id: $$config_mailer_email
name: MAILER_EMAIL
label: Mailer Email
defaultValue: hello@plausible.local
description: >-
The email id to use for as from address of all communications from Plausible.
- id: $$config_smtp_host_addr
name: SMTP_HOST_ADDR
label: SMTP Host Address
defaultValue: localhost
description: >-
The host address of your smtp server.
- id: $$config_smtp_host_port
name: SMTP_HOST_PORT
label: SMTP Port
defaultValue: "25"
description: >-
The port of your smtp server.
- id: $$config_smtp_user_name
name: SMTP_USER_NAME
label: SMTP Username
defaultValue: ""
description: >-
The username/email in case SMTP auth is enabled.
- id: $$config_smtp_user_pwd
name: SMTP_USER_PWD
label: SMTP Password
defaultValue: ""
description: >-
The password in case SMTP auth is enabled.
showOnConfiguration: true
- id: $$config_smtp_host_ssl_enabled
name: SMTP_HOST_SSL_ENABLED
label: SMTP SSL
defaultValue: "false"
description: >-
If SSL is enabled for SMTP connection.
- id: $$config_smtp_host_retries
name: SMTP_HOST_RETRIES
label: SMTP Retries
defaultValue: "2"
description: >-
Number of retries to make until mailer gives up.
- id: $$config_scriptName
name: SCRIPT_NAME
label: Custom Script Name
@@ -414,6 +464,7 @@
proxy:
- port: "22"
hostPort: $$config_hostport_ssh
- port: "3000"
variables:
- id: $$config_hostport_ssh
name: SSH_PORT
```
```
@@ -3388,6 +3439,13 @@
- DISABLE_REGISTRATION=$$config_disable_registration
- DATABASE_URL=$$secret_database_url
- CLICKHOUSE_DATABASE_URL=$$secret_clickhouse_database_url
- MAILER_EMAIL=$$config_mailer_email
- SMTP_HOST_ADDR=$$config_smtp_host_addr
- SMTP_HOST_PORT=$$config_smtp_host_port
- SMTP_USER_NAME=$$config_smtp_user_name
- SMTP_USER_PWD=$$config_smtp_user_pwd
- SMTP_HOST_SSL_ENABLED=$$config_smtp_host_ssl_enabled
- SMTP_HOST_RETRIES=$$config_smtp_host_retries
ports:
- "8000"
$$id-postgresql:
@@ -3495,6 +3553,49 @@
label: PostgreSQL Database
defaultValue: plausible
description: ""
- id: $$config_mailer_email
name: MAILER_EMAIL
label: Mailer Email
defaultValue: hello@plausible.local
description: >-
The email id to use for as from address of all communications from Plausible.
- id: $$config_smtp_host_addr
name: SMTP_HOST_ADDR
label: SMTP Host Address
defaultValue: localhost
description: >-
The host address of your smtp server.
- id: $$config_smtp_host_port
name: SMTP_HOST_PORT
label: SMTP Port
defaultValue: "25"
description: >-
The port of your smtp server.
- id: $$config_smtp_user_name
name: SMTP_USER_NAME
label: SMTP Username
defaultValue: ""
description: >-
The username/email in case SMTP auth is enabled.
- id: $$config_smtp_user_pwd
name: SMTP_USER_PWD
label: SMTP Password
defaultValue: ""
description: >-
The password in case SMTP auth is enabled.
showOnConfiguration: true
- id: $$config_smtp_host_ssl_enabled
name: SMTP_HOST_SSL_ENABLED
label: SMTP SSL
defaultValue: "false"
description: >-
If SSL is enabled for SMTP connection.
- id: $$config_smtp_host_retries
name: SMTP_HOST_RETRIES
label: SMTP Retries
defaultValue: "2"
description: >-
Number of retries to make until mailer gives up.
- id: $$config_scriptName
name: SCRIPT_NAME
label: Custom Script Name
```
```
@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "ApplicationPersistentStorage" ADD COLUMN "hostPath" TEXT;
@@ -195,6 +195,7 @@ model ApplicationSettings {
model ApplicationPersistentStorage {
id String @id @default(cuid())
applicationId String
hostPath String?
path String
oldPath Boolean @default(false)
createdAt DateTime @default(now())
```
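The new `hostPath` column lets a persistent storage entry point at an explicit host directory instead of a generated named volume. A minimal sketch of how such a record could be turned into a Docker volume mapping; `toVolumeString` and the interface shape are illustrative, not the project's actual API:

```ts
// Sketch: map an ApplicationPersistentStorage row to a docker volume string.
interface PersistentStorage {
  applicationId: string;
  path: string;        // mount point inside the container
  hostPath?: string;   // optional host directory (new column)
}

function toVolumeString(applicationId: string, storage: PersistentStorage): string {
  if (storage.hostPath) {
    // bind the host directory directly
    return `${storage.hostPath}:${storage.path}`;
  }
  // otherwise derive a named volume from the application id and mount path
  return `${applicationId}${storage.path.replace(/\//g, '-')}:${storage.path}`;
}

// toVolumeString('app1', { applicationId: 'app1', path: '/data', hostPath: '/srv/data' })
// => '/srv/data:/data'
```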
```
@@ -306,7 +306,7 @@ async function initServer() {
} catch (error) {}
try {
console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
await fs.rm('/tmp/build-sources', { recursive: true, force: true });
if (!isDev) await fs.rm('/tmp/build-sources', { recursive: true, force: true });
} catch (error) {
console.log(error);
}
@@ -402,14 +402,14 @@ async function autoUpdater() {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
if (isAutoUpdateEnabled) {
await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
await executeCommand({ command: `docker pull ghcr.io/coollabsio/coolify:${latestVersion}` });
await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
await executeCommand({
command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db ghcr.io/coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
}
} else {
@@ -604,53 +604,54 @@ async function cleanupStorage() {
if (!destination.remoteVerified) continue;
enginesDone.add(destination.remoteIpAddress);
}
let lowDiskSpace = false;
try {
let stdout = null;
if (!isDev) {
const output = await executeCommand({
dockerId: destination.id,
command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`,
shell: true
});
stdout = output.stdout;
} else {
const output = await executeCommand({
command: `df -kPT /`
});
stdout = output.stdout;
}
let lines = stdout.trim().split('\n');
let header = lines[0];
let regex =
/^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
const boundaries = [];
let match;
await cleanupDockerStorage(destination.id);
// let lowDiskSpace = false;
// try {
// let stdout = null;
// if (!isDev) {
// const output = await executeCommand({
// dockerId: destination.id,
// command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`,
// shell: true
// });
// stdout = output.stdout;
// } else {
// const output = await executeCommand({
// command: `df -kPT /`
// });
// stdout = output.stdout;
// }
// let lines = stdout.trim().split('\n');
// let header = lines[0];
// let regex =
// /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g;
// const boundaries = [];
// let match;

while ((match = regex.exec(header))) {
boundaries.push(match[0].length);
}
// while ((match = regex.exec(header))) {
// boundaries.push(match[0].length);
// }

boundaries[boundaries.length - 1] = -1;
const data = lines.slice(1).map((line) => {
const cl = boundaries.map((boundary) => {
const column = boundary > 0 ? line.slice(0, boundary) : line;
line = line.slice(boundary);
return column.trim();
});
return {
capacity: Number.parseInt(cl[5], 10) / 100
};
});
if (data.length > 0) {
const { capacity } = data[0];
if (capacity > 0.8) {
lowDiskSpace = true;
}
}
} catch (error) {}
if (lowDiskSpace) {
await cleanupDockerStorage(destination.id);
}
// boundaries[boundaries.length - 1] = -1;
// const data = lines.slice(1).map((line) => {
// const cl = boundaries.map((boundary) => {
// const column = boundary > 0 ? line.slice(0, boundary) : line;
// line = line.slice(boundary);
// return column.trim();
// });
// return {
// capacity: Number.parseInt(cl[5], 10) / 100
// };
// });
// if (data.length > 0) {
// const { capacity } = data[0];
// if (capacity > 0.8) {
// lowDiskSpace = true;
// }
// }
// } catch (error) {}
// if (lowDiskSpace) {
// await cleanupDockerStorage(destination.id);
// }
}
}
```
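The disk-space check that this change comments out parsed `df -kPT /` output column by column and only ran `cleanupDockerStorage` above 80% usage; the new code cleans up unconditionally. A standalone sketch of that capacity parsing for reference, simplified to read the capacity column directly instead of computing header column boundaries:

```ts
// Sketch: extract the root filesystem usage ratio from `df -kPT /` output.
function parseDfCapacity(stdout: string): number | null {
  const lines = stdout.trim().split('\n');
  if (lines.length < 2) return null;
  // POSIX df -P columns: Filesystem Type 1024-blocks Used Available Capacity Mounted on
  const columns = lines[1].trim().split(/\s+/);
  const capacity = Number.parseInt(columns[5], 10); // e.g. "83%" -> 83
  return Number.isNaN(capacity) ? null : capacity / 100;
}

// const lowDiskSpace = (parseDfCapacity(stdout) ?? 0) > 0.8;
```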
```
@@ -69,7 +69,15 @@ import * as buildpacks from '../lib/buildPacks';
teams: true
}
});

if (!application) {
await prisma.build.update({
where: { id: queueBuild.id },
data: {
status: 'failed'
}
});
throw new Error('Application not found');
}
let {
id: buildId,
type,
@@ -110,6 +118,9 @@ import * as buildpacks from '../lib/buildPacks';
.replace(/\//gi, '-')
.replace('-app', '')}:${storage.path}`;
}
if (storage.hostPath) {
return `${storage.hostPath}:${storage.path}`;
}
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || [];

@@ -160,13 +171,24 @@ import * as buildpacks from '../lib/buildPacks';
port: exposePort ? `${exposePort}:${port}` : port
});
try {
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
const composeVolumes = volumes
.filter((v) => {
if (
!v.startsWith('.') &&
!v.startsWith('..') &&
!v.startsWith('/') &&
!v.startsWith('~')
) {
return v;
}
};
});
})
.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
@@ -233,7 +255,7 @@ import * as buildpacks from '../lib/buildPacks';
applicationId: application.id
});
}
await fs.rm(workdir, { recursive: true, force: true });
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
@@ -256,7 +278,7 @@ import * as buildpacks from '../lib/buildPacks';
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}
} finally {
await fs.rm(workdir, { recursive: true, force: true });
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({
where: { id: buildId },
data: { status: 'success' }
@@ -381,6 +403,9 @@ import * as buildpacks from '../lib/buildPacks';
.replace(/\//gi, '-')
.replace('-app', '')}:${storage.path}`;
}
if (storage.hostPath) {
return `${storage.hostPath}:${storage.path}`;
}
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || [];

@@ -406,7 +431,7 @@ import * as buildpacks from '../lib/buildPacks';
installCommand = configuration.installCommand;
startCommand = configuration.startCommand;
buildCommand = configuration.buildCommand;
publishDirectory = configuration.publishDirectory;
publishDirectory = configuration.publishDirectory || '';
baseDirectory = configuration.baseDirectory || '';
dockerFileLocation = configuration.dockerFileLocation;
dockerComposeFileLocation = configuration.dockerComposeFileLocation;
@@ -693,13 +718,24 @@ import * as buildpacks from '../lib/buildPacks';
await saveDockerRegistryCredentials({ url, username, password, workdir });
}
try {
const composeVolumes = volumes.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
const composeVolumes = volumes
.filter((v) => {
if (
!v.startsWith('.') &&
!v.startsWith('..') &&
!v.startsWith('/') &&
!v.startsWith('~')
) {
return v;
}
};
});
})
.map((volume) => {
return {
[`${volume.split(':')[0]}`]: {
name: volume.split(':')[0]
}
};
});
const composeFile = {
version: '3.8',
services: {
@@ -770,7 +806,7 @@ import * as buildpacks from '../lib/buildPacks';
applicationId: application.id
});
}
await fs.rm(workdir, { recursive: true, force: true });
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
return;
}
try {
@@ -791,7 +827,7 @@ import * as buildpacks from '../lib/buildPacks';
await saveBuildLog({ line: error.stderr, buildId, applicationId });
}
} finally {
await fs.rm(workdir, { recursive: true, force: true });
if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
}
});
```
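Both hunks above apply the same filter before declaring top-level compose volumes: anything that looks like a host path (starting with `.`, `..`, `/` or `~`) is a bind mount and must not be registered as a named volume. A compact sketch of that filter-and-map step, assuming volume strings of the form `source:target`:

```ts
// Sketch: keep only named volumes, then build the top-level `volumes:` entries.
const isNamedVolume = (v: string): boolean =>
  !v.startsWith('.') && !v.startsWith('..') && !v.startsWith('/') && !v.startsWith('~');

function buildComposeVolumes(volumes: string[]): Record<string, { name: string }>[] {
  return volumes
    .filter(isNamedVolume)
    .map((volume) => {
      const name = volume.split(':')[0];
      return { [name]: { name } };
    });
}

// buildComposeVolumes(['mydata:/data', '/srv/logs:/logs'])
// => [{ mydata: { name: 'mydata' } }]   (the bind mount is skipped)
```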
```
@@ -86,19 +86,20 @@ export async function migrateServicesToNewTemplate() {
if (template.variables) {
if (template.variables.length > 0) {
for (const variable of template.variables) {
const { defaultValue } = variable;
let { defaultValue } = variable;
defaultValue = defaultValue.toString();
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec(defaultValue)?.[1]) || undefined
if (variable.defaultValue.startsWith('$$generate_password')) {
if (defaultValue.startsWith('$$generate_password')) {
variable.value = generatePassword({ length });
} else if (variable.defaultValue.startsWith('$$generate_hex')) {
} else if (defaultValue.startsWith('$$generate_hex')) {
variable.value = generatePassword({ length, isHex: true });
} else if (variable.defaultValue.startsWith('$$generate_username')) {
} else if (defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else if (variable.defaultValue.startsWith('$$generate_token')) {
} else if (defaultValue.startsWith('$$generate_token')) {
variable.value = generateToken()
} else {
variable.value = variable.defaultValue || '';
variable.value = defaultValue || '';
}
}
}
```
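The migration now coerces `defaultValue` to a string before matching, so numeric defaults no longer break `.startsWith()`, and the generator length is still read from the trailing `(n)`. A small sketch of that parsing, with an illustrative `resolveVariable` helper standing in for the real generators:

```ts
// Sketch: classify a template defaultValue such as "$$generate_password(32)" or a plain literal.
function resolveVariable(rawDefault: unknown): { kind: string; length?: number } {
  const defaultValue = String(rawDefault ?? '');
  // "$$generate_password(32)" -> length 32; no parentheses -> undefined
  const length = Number(/^\$\$.*\((\d+)\)$/.exec(defaultValue)?.[1]) || undefined;
  if (defaultValue.startsWith('$$generate_password')) return { kind: 'password', length };
  if (defaultValue.startsWith('$$generate_hex')) return { kind: 'hex', length };
  if (defaultValue.startsWith('$$generate_username')) return { kind: 'username' };
  if (defaultValue.startsWith('$$generate_token')) return { kind: 'token' };
  return { kind: 'literal' };
}

// resolveVariable('$$generate_password(32)') => { kind: 'password', length: 32 }
// resolveVariable(25)                        => { kind: 'literal' }
```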
```
@@ -429,7 +429,12 @@ export const setDefaultConfiguration = async (data: any) => {
startCommand = template?.startCommand || 'yarn start';
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
buildCommand = template?.buildCommand || null;
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
if (!publishDirectory) {
publishDirectory = template?.publishDirectory || null;
} else {
if (!publishDirectory.startsWith('/')) publishDirectory = `/${publishDirectory}`;
if (publishDirectory.endsWith('/')) publishDirectory = publishDirectory.slice(0, -1);
}
if (baseDirectory) {
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
if (baseDirectory.endsWith('/') && baseDirectory !== '/')
@@ -702,9 +707,8 @@ export async function buildImage({
buildId,
applicationId,
dockerId,
command: `docker ${location ? `--config ${location}` : ''} build ${
forceRebuild ? '--no-cache' : ''
} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
command: `docker ${location ? `--config ${location}` : ''} build ${forceRebuild ? '--no-cache' : ''
} --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
});

const { status } = await prisma.build.findUnique({ where: { id: buildId } });
@@ -800,6 +804,7 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
Dockerfile.push(`RUN ${installCommand}`);
}
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
}
@@ -817,6 +822,7 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
}
Dockerfile.push(`COPY *.json *.mix.js /app/`);
Dockerfile.push(`COPY resources /app/resources`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`RUN yarn install && yarn production`);
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
@@ -838,6 +844,7 @@ export async function buildCacheImageWithCargo(data, imageForBuild) {
Dockerfile.push('RUN cargo install cargo-chef');
Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
await buildImage({ ...data, isCache: true });
}
```
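`setDefaultConfiguration` now normalizes a user-supplied `publishDirectory` the same way `baseDirectory` is handled: a leading slash is enforced and a trailing slash is stripped. A minimal sketch of that normalization on its own:

```ts
// Sketch: normalize a directory setting to "/like/this" (leading slash, no trailing slash).
function normalizeDirectory(dir: string | null | undefined): string | null {
  if (!dir) return null;
  let normalized = dir.startsWith('/') ? dir : `/${dir}`;
  if (normalized.endsWith('/') && normalized !== '/') normalized = normalized.slice(0, -1);
  return normalized;
}

// normalizeDirectory('build/')  => '/build'
// normalizeDirectory('/dist')   => '/dist'
```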
```
@@ -36,17 +36,23 @@ export default async function (data) {
if (volumes.length > 0) {
for (const volume of volumes) {
let [v, path] = volume.split(':');
composeVolumes[v] = {
name: v
};
if (!v.startsWith('.') && !v.startsWith('..') && !v.startsWith('/') && !v.startsWith('~')) {
composeVolumes[v] = {
name: v
};
}
}
}

let networks = {};
for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
value['container_name'] = `${applicationId}-${key}`;

if (value['env_file']) {
delete value['env_file'];
}

let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
console.log({ key, environment });
if (Object.keys(environment).length > 0) {
environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`);
}
@@ -59,7 +65,7 @@ export default async function (data) {
const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
let finalArgs = [...buildEnvs];
if (Object.keys(buildArgs).length > 0) {
for (const arg of buildArgs) {
for (const arg of Object.keys(buildArgs)) {
const [key, _] = arg.split('=');
if (finalArgs.filter((env) => env.startsWith(key)).length === 0) {
finalArgs.push(arg);
@@ -77,17 +83,57 @@ export default async function (data) {
// TODO: If we support separated volume for each service, we need to add it here
if (value['volumes']?.length > 0) {
value['volumes'] = value['volumes'].map((volume) => {
let [v, path, permission] = volume.split(':');
if (!path) {
path = v;
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
} else {
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
if (typeof volume === 'string') {
let [v, path, permission] = volume.split(':');
if (
v.startsWith('.') ||
v.startsWith('..') ||
v.startsWith('/') ||
v.startsWith('~') ||
v.startsWith('$PWD')
) {
v = v
.replace(/^\./, `~`)
.replace(/^\.\./, '~')
.replace(/^\$PWD/, '~');
} else {
if (!path) {
path = v;
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
} else {
v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
}
composeVolumes[v] = {
name: v
};
}
return `${v}:${path}${permission ? ':' + permission : ''}`;
}
if (typeof volume === 'object') {
let { source, target, mode } = volume;
if (
source.startsWith('.') ||
source.startsWith('..') ||
source.startsWith('/') ||
source.startsWith('~') ||
source.startsWith('$PWD')
) {
source = source
.replace(/^\./, `~`)
.replace(/^\.\./, '~')
.replace(/^\$PWD/, '~');
console.log({ source });
} else {
if (!target) {
target = source;
source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
} else {
source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
}
}

return `${source}:${target}${mode ? ':' + mode : ''}`;
}
composeVolumes[v] = {
name: v
};
return `${v}:${path}${permission ? ':' + permission : ''}`;
});
}
if (volumes.length > 0) {
@@ -95,19 +141,23 @@ export default async function (data) {
value['volumes'].push(volume);
}
}
if (dockerComposeConfiguration[key].port) {
if (dockerComposeConfiguration[key]?.port) {
value['expose'] = [dockerComposeConfiguration[key].port];
}
if (value['networks']?.length > 0) {
value['networks'].forEach((network) => {
networks[network] = {
name: network
};
});
value['networks'] = [...(value['networks'] || ''), network];
} else {
value['networks'] = [network];
value['networks'] = [network];
if (value['build']?.network) {
delete value['build']['network'];
}
// if (value['networks']?.length > 0) {
// value['networks'].forEach((network) => {
// networks[network] = {
// name: network
// };
// });
// value['networks'] = [...(value['networks'] || ''), network];
// } else {
// value['networks'] = [network];
// }

dockerComposeYaml.services[key] = {
...dockerComposeYaml.services[key],
```
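The rewritten volume handling accepts both the short string syntax and the long object syntax of Compose, remaps relative and `$PWD`-based sources under `~`, and prefixes real named volumes with the application id. A condensed sketch of the string branch, under the assumption that entries look like `source:target[:mode]`:

```ts
// Sketch: normalize one short-syntax compose volume entry for a given application.
function normalizeVolume(applicationId: string, volume: string): string {
  let [source, target, mode] = volume.split(':');
  const isHostLike =
    source.startsWith('.') || source.startsWith('..') ||
    source.startsWith('/') || source.startsWith('~') || source.startsWith('$PWD');
  if (isHostLike) {
    // keep bind mounts, but anchor relative paths and $PWD under the user's home
    source = source.replace(/^\.\./, '~').replace(/^\./, '~').replace(/^\$PWD/, '~');
  } else {
    // named volume: scope it to the application
    if (!target) target = source;
    source = `${applicationId}${source.replace(/\//g, '-').replace(/\./g, '')}`;
  }
  return `${source}:${target}${mode ? ':' + mode : ''}`;
}

// normalizeVolume('app1', './data:/data')  => '~/data:/data'
// normalizeVolume('app1', 'cache:/cache')  => 'app1cache:/cache'
```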
@@ -36,6 +36,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
||||
Dockerfile.push(`ENV NO_COLOR true`);
|
||||
Dockerfile.push('RUN rm -fr .git');
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
|
||||
@@ -8,10 +8,11 @@ const createDockerfile = async (data, imageforBuild): Promise<void> => {
|
||||
Dockerfile.push(`FROM ${imageforBuild}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push('RUN rm -fr .git');
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
@@ -30,6 +30,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
||||
);
|
||||
Dockerfile.push(`COPY --chown=application:application . ./`);
|
||||
Dockerfile.push('RUN rm -fr .git');
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
@@ -2,7 +2,7 @@ import { promises as fs } from 'fs';
|
||||
import { buildCacheImageWithNode, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
|
||||
const { buildId, applicationId, tag, port, startCommand, workdir, publishDirectory } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
const isPnpm = startCommand.includes('pnpm');
|
||||
|
||||
@@ -12,8 +12,8 @@ const createDockerfile = async (data, image): Promise<void> => {
|
||||
if (isPnpm) {
|
||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);
|
||||
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
||||
Dockerfile.push('RUN rm -fr .git');
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
|
||||
@@ -36,13 +36,15 @@ const createDockerfile = async (data, image): Promise<void> => {
|
||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||
Dockerfile.push(`RUN ${installCommand}`);
|
||||
Dockerfile.push(`RUN ${buildCommand}`);
|
||||
Dockerfile.push('RUN rm -fr .git');
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
Dockerfile.push(`CMD ${startCommand}`);
|
||||
} else if (deploymentType === 'static') {
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
||||
Dockerfile.push('RUN rm -fr .git');
|
||||
Dockerfile.push(`EXPOSE 80`);
|
||||
}

@@ -34,6 +34,7 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`RUN ${buildCommand}`);
}
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`CMD ${startCommand}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -36,13 +36,15 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
Dockerfile.push(`RUN ${buildCommand}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
} else if (deploymentType === 'static') {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE 80`);
}

@@ -28,6 +28,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
}

Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -52,7 +52,7 @@ const createDockerfile = async (data, image): Promise<void> => {
} else {
Dockerfile.push(`CMD python ${pythonModule}`);
}

Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -8,10 +8,11 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -20,6 +20,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
);
Dockerfile.push(`RUN update-ca-certificates`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ["/app/${name}"]`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));

@@ -31,13 +31,14 @@ const createDockerfile = async (data, image): Promise<void> => {
});
}
if (buildCommand) {
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
} else {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
}
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -8,10 +8,11 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -8,10 +8,11 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`FROM ${image}`);
Dockerfile.push('WORKDIR /app');
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -11,15 +11,15 @@ import { promises as dns } from 'dns';
import * as Sentry from '@sentry/node';
import { PrismaClient } from '@prisma/client';
import os from 'os';
import sshConfig from 'ssh-config';
import * as SSHConfig from 'ssh-config/src/ssh-config';
import jsonwebtoken from 'jsonwebtoken';
import { checkContainer, removeContainer } from './docker';
import { day } from './dayjs';
import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common';
import { saveBuildLog } from './buildPacks/common';
import { scheduler } from './scheduler';
import type { ExecaChildProcess } from 'execa';

export const version = '3.12.20';
export const version = '3.12.32';
export const isDev = process.env.NODE_ENV === 'development';
export const proxyPort = process.env.COOLIFY_PROXY_PORT;
export const proxySecurePort = process.env.COOLIFY_PROXY_SECURE_PORT;

@@ -402,8 +402,8 @@ export const supportedDatabaseTypesAndVersions = [
fancyName: 'MongoDB',
baseImage: 'bitnami/mongodb',
baseImageARM: 'mongo',
versions: ['5.0', '4.4', '4.2'],
versionsARM: ['5.0', '4.4', '4.2']
versions: ['6.0', '5.0', '4.4', '4.2'],
versionsARM: ['6.0', '5.0', '4.4', '4.2']
},
{
name: 'mysql',

@@ -418,16 +418,16 @@ export const supportedDatabaseTypesAndVersions = [
fancyName: 'MariaDB',
baseImage: 'bitnami/mariadb',
baseImageARM: 'mariadb',
versions: ['10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2'],
versionsARM: ['10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2']
versions: ['10.11', '10.10', '10.9', '10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2'],
versionsARM: ['10.11', '10.10', '10.9', '10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2']
},
{
name: 'postgresql',
fancyName: 'PostgreSQL',
baseImage: 'bitnami/postgresql',
baseImageARM: 'postgres',
versions: ['14.5.0', '13.8.0', '12.12.0', '11.17.0', '10.22.0'],
versionsARM: ['14.5', '13.8', '12.12', '11.17', '10.22']
versions: ['15.2.0', '14.7.0', '14.5.0', '13.8.0', '12.12.0', '11.17.0', '10.22.0'],
versionsARM: ['15.2', '14.7', '14.5', '13.8', '12.12', '11.17', '10.22']
},
{
name: 'redis',

@@ -442,14 +442,14 @@ export const supportedDatabaseTypesAndVersions = [
fancyName: 'CouchDB',
baseImage: 'bitnami/couchdb',
baseImageARM: 'couchdb',
versions: ['3.2.2', '3.1.2', '2.3.1'],
versionsARM: ['3.2.2', '3.1.2', '2.3.1']
versions: ['3.3.1', '3.2.2', '3.1.2', '2.3.1'],
versionsARM: ['3.3', '3.2.2', '3.1.2', '2.3.1']
},
{
name: 'edgedb',
fancyName: 'EdgeDB',
baseImage: 'edgedb/edgedb',
versions: ['latest', '2.1', '2.0', '1.4']
versions: ['latest', '2.9', '2.8', '2.7']
}
];

@@ -498,33 +498,56 @@ export async function getFreeSSHLocalPort(id: string): Promise<number | boolean>
return false;
}

/**
 * Update the ssh config file with a host
 *
 * @param id Destination ID
 * @returns
 */
export async function createRemoteEngineConfiguration(id: string) {
const homedir = os.homedir();
const sshKeyFile = `/tmp/id_rsa-${id}`;
const localPort = await getFreeSSHLocalPort(id);
const {
sshKey: { privateKey },
network,
remoteIpAddress,
remotePort,
remoteUser
} = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });

// Write new keyfile
await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 });
const config = sshConfig.parse('');

const Host = `${remoteIpAddress}-remote`;

// Removes previous ssh-keys
try {
await executeCommand({ command: `ssh-keygen -R ${Host}` });
await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
} catch (error) { }
} catch (error) {
//
}

const homedir = os.homedir();
let currentConfigFileContent = '';
try {
// Read the current config file
currentConfigFileContent = (await fs.readFile(`${homedir}/.ssh/config`)).toString();
} catch (error) {
// File doesn't exist, so we do nothing, a new one is going to be created
}

// Parse the config file
const config = SSHConfig.parse(currentConfigFileContent);

// Remove current config for the given host
const found = config.find({ Host });
const foundIp = config.find({ Host: remoteIpAddress });

if (found) config.remove({ Host });
if (foundIp) config.remove({ Host: remoteIpAddress });

// Create the new config
config.append({
Host,
Hostname: remoteIpAddress,

@@ -537,13 +560,17 @@ export async function createRemoteEngineConfiguration(id: string) {
ControlPersist: '10m'
});

// Check if .ssh folder exists, and if not create one
try {
await fs.stat(`${homedir}/.ssh/`);
} catch (error) {
await fs.mkdir(`${homedir}/.ssh/`);
}
return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));

// Write the config
return await fs.writeFile(`${homedir}/.ssh/config`, SSHConfig.stringify(config));
}

export async function executeCommand({
command,
dockerId = null,

@@ -552,7 +579,8 @@ export async function executeCommand({
stream = false,
buildId,
applicationId,
debug
debug,
timeout = 0
}: {
command: string;
sshCommand?: boolean;

@@ -562,6 +590,7 @@ export async function executeCommand({
buildId?: string;
applicationId?: string;
debug?: boolean;
timeout?: number;
}): Promise<ExecaChildProcess<string>> {
const { execa, execaCommand } = await import('execa');
const { parse } = await import('shell-quote');

@@ -586,20 +615,26 @@ export async function executeCommand({
}
if (sshCommand) {
if (shell) {
return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`);
return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`, {
timeout
});
}
return await execa('ssh', [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs]);
return await execa('ssh', [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs], {
timeout
});
}
if (stream) {
return await new Promise(async (resolve, reject) => {
let subprocess = null;
if (shell) {
subprocess = execaCommand(command, {
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine },
timeout
});
} else {
subprocess = execa(dockerCommand, dockerArgs, {
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine },
timeout
});
}
const logs = [];

@@ -653,19 +688,26 @@ export async function executeCommand({
} else {
if (shell) {
return await execaCommand(command, {
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine },
timeout
});
} else {
return await execa(dockerCommand, dockerArgs, {
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine },
timeout
});
}
}
} else {
if (shell) {
return execaCommand(command, { shell: true });
return execaCommand(command, {
shell: true,
timeout
});
}
return await execa(dockerCommand, dockerArgs);
return await execa(dockerCommand, dockerArgs, {
timeout
});
}
}
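For orientation, a minimal usage sketch of the new optional `timeout` (a hedged example, not code from this repository; execa interprets the value in milliseconds, so the `timeout = 0` default keeps the previous unlimited behaviour):

```ts
import { executeCommand } from './common';

// Hypothetical call site: cap a remote docker command at 60 seconds.
async function pruneImages(dockerId: string) {
	try {
		await executeCommand({
			dockerId,
			command: 'docker image prune -af',
			timeout: 60_000 // ms; forwarded to execa, which kills the child process on expiry
		});
	} catch (error: any) {
		// execa marks expired runs with `timedOut: true`
		console.error(error.timedOut ? 'command timed out' : 'command failed', error.shortMessage ?? error);
	}
}
```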

@@ -986,7 +1028,7 @@ export function generateDatabaseConfiguration(database: any): DatabaseConfigurat
ulimits: {}
};
if (isARM()) {
configuration.volume = `${id}-${type}-data:/var/lib/postgresql`;
configuration.volume = `${id}-${type}-data:/var/lib/postgresql/data`;
configuration.environmentVariables = {
POSTGRES_PASSWORD: dbUserPassword,
POSTGRES_USER: dbUser,

@@ -1048,7 +1090,7 @@ export function generateDatabaseConfiguration(database: any): DatabaseConfigurat
}
export function isARM() {
const arch = process.arch;
if (arch === 'arm' || arch === 'arm64') {
if (arch === 'arm' || arch === 'arm64' || arch === 'aarch' || arch === 'aarch64') {
return true;
}
return false;

@@ -1633,6 +1675,9 @@ export function errorHandler({
type?: string | null;
}) {
if (message.message) message = message.message;
if (message.includes('Unique constraint failed')) {
message = 'This data is unique and already exists. Please try again with a different value.';
}
if (type === 'normal') {
Sentry.captureException(message);
}

@@ -50,24 +50,12 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
const config = {};
for (const s in template.services) {
let newEnvironments = []
if (arm) {
if (template.services[s]?.environmentArm?.length > 0) {
for (const environment of template.services[s].environmentArm) {
let [env, ...value] = environment.split("=");
value = value.join("=")
if (!value.startsWith('$$secret') && value !== '') {
newEnvironments.push(`${env}=${value}`)
}
}
}
} else {
if (template.services[s]?.environment?.length > 0) {
for (const environment of template.services[s].environment) {
let [env, ...value] = environment.split("=");
value = value.join("=")
if (!value.startsWith('$$secret') && value !== '') {
newEnvironments.push(`${env}=${value}`)
}
if (template.services[s]?.environment?.length > 0) {
for (const environment of template.services[s].environment) {
let [env, ...value] = environment.split("=");
value = value.join("=")
if (!value.startsWith('$$secret') && value !== '') {
newEnvironments.push(`${env}=${value}`)
}
}
}
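As an aside on the `split("=")` / `join("=")` pattern kept in this hunk: it treats only the first `=` as the separator, so values that themselves contain `=` survive intact. A small illustration with made-up data:

```ts
// Hypothetical entry; the value contains '=' characters (e.g. base64 padding).
const environment = 'JWT_SECRET=c29tZXZhbHVl==';
const [env, ...rest] = environment.split('=');
const value = rest.join('='); // 'c29tZXZhbHVl==' — everything after the first '='
console.log(`${env}=${value}`);
```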

@@ -87,12 +75,13 @@ export async function startService(request: FastifyRequest<ServiceStartStop>, fa
}
const customVolumes = await prisma.servicePersistentStorage.findMany({ where: { serviceId: id } })
let volumes = new Set()
if (arm) {
template.services[s]?.volumesArm && template.services[s].volumesArm.length > 0 && template.services[s].volumesArm.forEach(v => volumes.add(v))
if (arm && template.services[s]?.volumesArm?.length > 0) {
template.services[s].volumesArm.forEach(v => volumes.add(v))
} else {
template.services[s]?.volumes && template.services[s].volumes.length > 0 && template.services[s].volumes.forEach(v => volumes.add(v))
if (template.services[s]?.volumes?.length > 0) {
template.services[s].volumes.forEach(v => volumes.add(v))
}
}

// Workaround: old plausible analytics service wrong volume id name
if (service.type === 'plausibleanalytics' && service.plausibleAnalytics?.id) {
let temp = Array.from(volumes)

@@ -640,8 +640,7 @@ export async function restartApplication(

const volumes =
persistentStorage?.map((storage) => {
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : ''
}${storage.path}`;
return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
}) || [];
const composeVolumes = volumes.map((volume) => {
return {

@@ -736,7 +735,7 @@ export async function deleteApplication(
where: { id },
include: { destinationDocker: true, teams: true }
});
if (teamId !== '0' || !application.teams.some((team) => team.id === teamId)) {
if (teamId !== '0' && !application.teams.some((team) => team.id === teamId)) {
throw { status: 403, message: 'You are not allowed to delete this application.' };
}
if (application?.destinationDocker?.id && application.destinationDocker?.network) {

@@ -1340,16 +1339,16 @@ export async function getStorages(request: FastifyRequest<OnlyId>) {
export async function saveStorage(request: FastifyRequest<SaveStorage>, reply: FastifyReply) {
try {
const { id } = request.params;
const { path, newStorage, storageId } = request.body;
const { hostPath, path, newStorage, storageId } = request.body;

if (newStorage) {
await prisma.applicationPersistentStorage.create({
data: { path, application: { connect: { id } } }
data: { hostPath, path, application: { connect: { id } } }
});
} else {
await prisma.applicationPersistentStorage.update({
where: { id: storageId },
data: { path }
data: { hostPath, path }
});
}
return reply.code(201).send();

@@ -96,6 +96,7 @@ export interface DeleteSecret extends OnlyId {
}
export interface SaveStorage extends OnlyId {
Body: {
hostPath?: string;
path: string;
newStorage: boolean;
storageId: string;

@@ -302,7 +302,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
databaseSecret
} = database;
const { privatePort, command, environmentVariables, image, volume, ulimits } =
generateDatabaseConfiguration(database, arch);
generateDatabaseConfiguration(database);

const network = destinationDockerId && destinationDocker.network;
const volumeName = volume.split(':')[0];

@@ -1,279 +1,384 @@
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import sshConfig from 'ssh-config'
|
||||
import fs from 'fs/promises'
|
||||
import os from 'os';
|
||||
|
||||
import { createRemoteEngineConfiguration, decrypt, errorHandler, executeCommand, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
|
||||
import {
|
||||
errorHandler,
|
||||
executeCommand,
|
||||
listSettings,
|
||||
prisma,
|
||||
startTraefikProxy,
|
||||
stopTraefikProxy
|
||||
} from '../../../../lib/common';
|
||||
import { checkContainer } from '../../../../lib/docker';
|
||||
|
||||
import type { OnlyId } from '../../../../types';
|
||||
import type { CheckDestination, ListDestinations, NewDestination, Proxy, SaveDestinationSettings } from './types';
|
||||
import type {
|
||||
CheckDestination,
|
||||
ListDestinations,
|
||||
NewDestination,
|
||||
Proxy,
|
||||
SaveDestinationSettings
|
||||
} from './types';
|
||||
import { removeService } from '../../../../lib/services/common';
|
||||
|
||||
export async function listDestinations(request: FastifyRequest<ListDestinations>) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { onlyVerified = false } = request.query
|
||||
let destinations = []
|
||||
if (teamId === '0') {
|
||||
destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
|
||||
} else {
|
||||
destinations = await prisma.destinationDocker.findMany({
|
||||
where: { teams: { some: { id: teamId } } },
|
||||
include: { teams: true }
|
||||
});
|
||||
}
|
||||
if (onlyVerified) {
|
||||
destinations = destinations.filter(destination => destination.engine || (destination.remoteEngine && destination.remoteVerified))
|
||||
}
|
||||
return {
|
||||
destinations
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { onlyVerified = false } = request.query;
|
||||
let destinations = [];
|
||||
if (teamId === '0') {
|
||||
destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
|
||||
} else {
|
||||
destinations = await prisma.destinationDocker.findMany({
|
||||
where: { teams: { some: { id: teamId } } },
|
||||
include: { teams: true }
|
||||
});
|
||||
}
|
||||
if (onlyVerified) {
|
||||
destinations = destinations.filter(
|
||||
(destination) =>
|
||||
destination.engine || (destination.remoteEngine && destination.remoteVerified)
|
||||
);
|
||||
}
|
||||
return {
|
||||
destinations
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function checkDestination(request: FastifyRequest<CheckDestination>) {
|
||||
try {
|
||||
const { network } = request.body;
|
||||
const found = await prisma.destinationDocker.findFirst({ where: { network } });
|
||||
if (found) {
|
||||
throw {
|
||||
message: `Network already exists: ${network}`
|
||||
};
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const { network } = request.body;
|
||||
const found = await prisma.destinationDocker.findFirst({ where: { network } });
|
||||
if (found) {
|
||||
throw {
|
||||
message: `Network already exists: ${network}`
|
||||
};
|
||||
}
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function getDestination(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const teamId = request.user?.teamId;
|
||||
const destination = await prisma.destinationDocker.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { sshKey: true, application: true, service: true, database: true }
|
||||
});
|
||||
if (!destination && id !== 'new') {
|
||||
throw { status: 404, message: `Destination not found.` };
|
||||
}
|
||||
const settings = await listSettings();
|
||||
const payload = {
|
||||
destination,
|
||||
settings
|
||||
};
|
||||
return {
|
||||
...payload
|
||||
};
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const teamId = request.user?.teamId;
|
||||
const destination = await prisma.destinationDocker.findFirst({
|
||||
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
include: { sshKey: true, application: true, service: true, database: true }
|
||||
});
|
||||
if (!destination && id !== 'new') {
|
||||
throw { status: 404, message: `Destination not found.` };
|
||||
}
|
||||
const settings = await listSettings();
|
||||
const payload = {
|
||||
destination,
|
||||
settings
|
||||
};
|
||||
return {
|
||||
...payload
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function newDestination(request: FastifyRequest<NewDestination>, reply: FastifyReply) {
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params
|
||||
try {
|
||||
const teamId = request.user.teamId;
|
||||
const { id } = request.params;
|
||||
|
||||
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
|
||||
if (id === 'new') {
|
||||
if (engine) {
|
||||
const { stdout } = await await executeCommand({ command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'` });
|
||||
if (stdout === '') {
|
||||
await await executeCommand({ command: `docker network create --attachable ${network}` });
|
||||
}
|
||||
await prisma.destinationDocker.create({
|
||||
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
|
||||
});
|
||||
const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
|
||||
const destination = destinations.find((destination) => destination.network === network);
|
||||
if (destinations.length > 0) {
|
||||
const proxyConfigured = destinations.find(
|
||||
(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
|
||||
);
|
||||
if (proxyConfigured) {
|
||||
isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
|
||||
}
|
||||
await prisma.destinationDocker.updateMany({ where: { engine }, data: { isCoolifyProxyUsed } });
|
||||
}
|
||||
if (isCoolifyProxyUsed) {
|
||||
await startTraefikProxy(destination.id);
|
||||
}
|
||||
return reply.code(201).send({ id: destination.id });
|
||||
} else {
|
||||
const destination = await prisma.destinationDocker.create({
|
||||
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort: Number(remotePort) }
|
||||
});
|
||||
return reply.code(201).send({ id: destination.id })
|
||||
}
|
||||
} else {
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
|
||||
return reply.code(201).send();
|
||||
}
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } =
|
||||
request.body;
|
||||
if (id === 'new') {
|
||||
if (engine) {
|
||||
const { stdout } = await await executeCommand({
|
||||
command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'`
|
||||
});
|
||||
if (stdout === '') {
|
||||
await await executeCommand({ command: `docker network create --attachable ${network}` });
|
||||
}
|
||||
await prisma.destinationDocker.create({
|
||||
data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
|
||||
});
|
||||
const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
|
||||
const destination = destinations.find((destination) => destination.network === network);
|
||||
if (destinations.length > 0) {
|
||||
const proxyConfigured = destinations.find(
|
||||
(destination) =>
|
||||
destination.network !== network && destination.isCoolifyProxyUsed === true
|
||||
);
|
||||
if (proxyConfigured) {
|
||||
isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
|
||||
}
|
||||
await prisma.destinationDocker.updateMany({
|
||||
where: { engine },
|
||||
data: { isCoolifyProxyUsed }
|
||||
});
|
||||
}
|
||||
if (isCoolifyProxyUsed) {
|
||||
await startTraefikProxy(destination.id);
|
||||
}
|
||||
return reply.code(201).send({ id: destination.id });
|
||||
} else {
|
||||
const destination = await prisma.destinationDocker.create({
|
||||
data: {
|
||||
name,
|
||||
teams: { connect: { id: teamId } },
|
||||
engine,
|
||||
network,
|
||||
isCoolifyProxyUsed,
|
||||
remoteEngine: true,
|
||||
remoteIpAddress,
|
||||
remoteUser,
|
||||
remotePort: Number(remotePort)
|
||||
}
|
||||
});
|
||||
return reply.code(201).send({ id: destination.id });
|
||||
}
|
||||
} else {
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
|
||||
return reply.code(201).send();
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function forceDeleteDestination(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const services = await prisma.service.findMany({ where: { destinationDockerId: id } });
|
||||
for (const service of services) {
|
||||
await removeService({ id: service.id });
|
||||
}
|
||||
const applications = await prisma.application.findMany({ where: { destinationDockerId: id } });
|
||||
for (const application of applications) {
|
||||
await prisma.applicationSettings.deleteMany({ where: { application: { id: application.id } } });
|
||||
await prisma.buildLog.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.build.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.secret.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: application.id } });
|
||||
await prisma.previewApplication.deleteMany({ where: { applicationId: application.id } });
|
||||
}
|
||||
const databases = await prisma.database.findMany({ where: { destinationDockerId: id } });
|
||||
for (const database of databases) {
|
||||
await prisma.databaseSettings.deleteMany({ where: { databaseId: database.id } });
|
||||
await prisma.databaseSecret.deleteMany({ where: { databaseId: database.id } });
|
||||
await prisma.database.delete({ where: { id: database.id } });
|
||||
}
|
||||
await prisma.destinationDocker.delete({ where: { id } });
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function deleteDestination(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
|
||||
if (isCoolifyProxyUsed) {
|
||||
if (engine || remoteVerified) {
|
||||
const { stdout: found } = await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
|
||||
})
|
||||
if (found) {
|
||||
await executeCommand({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
|
||||
await executeCommand({ dockerId: id, command: `docker network rm ${network}` })
|
||||
}
|
||||
}
|
||||
}
|
||||
await prisma.destinationDocker.delete({ where: { id } });
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const appFound = await prisma.application.findFirst({ where: { destinationDockerId: id } });
|
||||
const serviceFound = await prisma.service.findFirst({ where: { destinationDockerId: id } });
|
||||
const databaseFound = await prisma.database.findFirst({ where: { destinationDockerId: id } });
|
||||
if (appFound || serviceFound || databaseFound) {
|
||||
throw {
|
||||
message: `Destination is in use.<br>Remove all applications, services and databases using this destination first.`
|
||||
};
|
||||
}
|
||||
const { network, remoteVerified, engine, isCoolifyProxyUsed } =
|
||||
await prisma.destinationDocker.findUnique({ where: { id } });
|
||||
if (isCoolifyProxyUsed) {
|
||||
if (engine || remoteVerified) {
|
||||
const { stdout: found } = await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
|
||||
});
|
||||
if (found) {
|
||||
await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker network disconnect ${network} coolify-proxy`
|
||||
});
|
||||
await executeCommand({ dockerId: id, command: `docker network rm ${network}` });
|
||||
}
|
||||
}
|
||||
}
|
||||
await prisma.destinationDocker.delete({ where: { id } });
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function saveDestinationSettings(request: FastifyRequest<SaveDestinationSettings>) {
|
||||
try {
|
||||
const { engine, isCoolifyProxyUsed } = request.body;
|
||||
await prisma.destinationDocker.updateMany({
|
||||
where: { engine },
|
||||
data: { isCoolifyProxyUsed }
|
||||
});
|
||||
try {
|
||||
const { engine, isCoolifyProxyUsed } = request.body;
|
||||
await prisma.destinationDocker.updateMany({
|
||||
where: { engine },
|
||||
data: { isCoolifyProxyUsed }
|
||||
});
|
||||
|
||||
return {
|
||||
status: 202
|
||||
}
|
||||
// return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
return {
|
||||
status: 202
|
||||
};
|
||||
// return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function startProxy(request: FastifyRequest<Proxy>) {
|
||||
const { id } = request.params
|
||||
try {
|
||||
await startTraefikProxy(id);
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
await stopTraefikProxy(id);
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
const { id } = request.params;
|
||||
try {
|
||||
await startTraefikProxy(id);
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
await stopTraefikProxy(id);
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function stopProxy(request: FastifyRequest<Proxy>) {
|
||||
const { id } = request.params
|
||||
try {
|
||||
await stopTraefikProxy(id);
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
const { id } = request.params;
|
||||
try {
|
||||
await stopTraefikProxy(id);
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function restartProxy(request: FastifyRequest<Proxy>) {
|
||||
const { id } = request.params
|
||||
try {
|
||||
await stopTraefikProxy(id);
|
||||
await startTraefikProxy(id);
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: true }
|
||||
});
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: false }
|
||||
});
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
const { id } = request.params;
|
||||
try {
|
||||
await stopTraefikProxy(id);
|
||||
await startTraefikProxy(id);
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: true }
|
||||
});
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: false }
|
||||
});
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
|
||||
export async function assignSSHKey(request: FastifyRequest) {
|
||||
try {
|
||||
const { id: sshKeyId } = request.body;
|
||||
const { id } = request.params;
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { sshKey: { connect: { id: sshKeyId } } } })
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const { id: sshKeyId } = request.body;
|
||||
const { id } = request.params;
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { sshKey: { connect: { id: sshKeyId } } }
|
||||
});
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function verifyRemoteDockerEngineFn(id: string) {
|
||||
const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
|
||||
const daemonJson = `daemon-${id}.json`
|
||||
try {
|
||||
await executeCommand({ sshCommand: true, command: `docker network inspect ${network}`, dockerId: id });
|
||||
} catch (error) {
|
||||
await executeCommand({ command: `docker network create --attachable ${network}`, dockerId: id });
|
||||
}
|
||||
const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst(
|
||||
{ where: { id } }
|
||||
);
|
||||
const daemonJson = `daemon-${id}.json`;
|
||||
try {
|
||||
await executeCommand({
|
||||
sshCommand: true,
|
||||
command: `docker network inspect ${network}`,
|
||||
dockerId: id
|
||||
});
|
||||
} catch (error) {
|
||||
await executeCommand({
|
||||
command: `docker network create --attachable ${network}`,
|
||||
dockerId: id
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await executeCommand({ sshCommand: true, command: `docker network inspect coolify-infra`, dockerId: id });
|
||||
} catch (error) {
|
||||
await executeCommand({ command: `docker network create --attachable coolify-infra`, dockerId: id });
|
||||
}
|
||||
try {
|
||||
await executeCommand({
|
||||
sshCommand: true,
|
||||
command: `docker network inspect coolify-infra`,
|
||||
dockerId: id
|
||||
});
|
||||
} catch (error) {
|
||||
await executeCommand({
|
||||
command: `docker network create --attachable coolify-infra`,
|
||||
dockerId: id
|
||||
});
|
||||
}
|
||||
|
||||
if (isCoolifyProxyUsed) await startTraefikProxy(id);
|
||||
let isUpdated = false;
|
||||
let daemonJsonParsed = {
|
||||
"live-restore": true,
|
||||
"features": {
|
||||
"buildkit": true
|
||||
}
|
||||
};
|
||||
try {
|
||||
const { stdout: daemonJson } = await executeCommand({ sshCommand: true, dockerId: id, command: `cat /etc/docker/daemon.json` });
|
||||
daemonJsonParsed = JSON.parse(daemonJson);
|
||||
if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
|
||||
isUpdated = true;
|
||||
daemonJsonParsed['live-restore'] = true
|
||||
|
||||
}
|
||||
if (!daemonJsonParsed?.features?.buildkit) {
|
||||
isUpdated = true;
|
||||
daemonJsonParsed.features = {
|
||||
buildkit: true
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
isUpdated = true;
|
||||
}
|
||||
try {
|
||||
if (isUpdated) {
|
||||
await executeCommand({ shell: true, command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}` })
|
||||
await executeCommand({ dockerId: id, command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json` });
|
||||
await executeCommand({ command: `rm /tmp/${daemonJson}` })
|
||||
await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
|
||||
}
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
|
||||
} catch (error) {
|
||||
throw new Error('Error while verifying remote docker engine')
|
||||
}
|
||||
if (isCoolifyProxyUsed) await startTraefikProxy(id);
|
||||
let isUpdated = false;
|
||||
let daemonJsonParsed = {
|
||||
'live-restore': true,
|
||||
features: {
|
||||
buildkit: true
|
||||
}
|
||||
};
|
||||
try {
|
||||
const { stdout: daemonJson } = await executeCommand({
|
||||
sshCommand: true,
|
||||
dockerId: id,
|
||||
command: `cat /etc/docker/daemon.json`
|
||||
});
|
||||
daemonJsonParsed = JSON.parse(daemonJson);
|
||||
if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
|
||||
isUpdated = true;
|
||||
daemonJsonParsed['live-restore'] = true;
|
||||
}
|
||||
if (!daemonJsonParsed?.features?.buildkit) {
|
||||
isUpdated = true;
|
||||
daemonJsonParsed.features = {
|
||||
buildkit: true
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
isUpdated = true;
|
||||
}
|
||||
try {
|
||||
if (isUpdated) {
|
||||
await executeCommand({
|
||||
shell: true,
|
||||
command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}`
|
||||
});
|
||||
await executeCommand({
|
||||
dockerId: id,
|
||||
command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json`
|
||||
});
|
||||
await executeCommand({ command: `rm /tmp/${daemonJson}` });
|
||||
await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
|
||||
}
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } });
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
throw new Error('Error while verifying remote docker engine');
|
||||
}
|
||||
}
|
||||
export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
|
||||
const { id } = request.params;
|
||||
try {
|
||||
await verifyRemoteDockerEngineFn(id);
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: false } })
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
export async function verifyRemoteDockerEngine(
|
||||
request: FastifyRequest<OnlyId>,
|
||||
reply: FastifyReply
|
||||
) {
|
||||
const { id } = request.params;
|
||||
try {
|
||||
await verifyRemoteDockerEngineFn(id);
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: false } });
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
|
||||
export async function getDestinationStatus(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const destination = await prisma.destinationDocker.findUnique({ where: { id } })
|
||||
const { found: isRunning } = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
|
||||
return {
|
||||
isRunning
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const destination = await prisma.destinationDocker.findUnique({ where: { id } });
|
||||
const { found: isRunning } = await checkContainer({
|
||||
dockerId: destination.id,
|
||||
container: 'coolify-proxy',
|
||||
remove: true
|
||||
});
|
||||
return {
|
||||
isRunning
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
import { FastifyPluginAsync } from 'fastify';
import { assignSSHKey, checkDestination, deleteDestination, getDestination, getDestinationStatus, listDestinations, newDestination, restartProxy, saveDestinationSettings, startProxy, stopProxy, verifyRemoteDockerEngine } from './handlers';
import { assignSSHKey, checkDestination, deleteDestination, forceDeleteDestination, getDestination, getDestinationStatus, listDestinations, newDestination, restartProxy, saveDestinationSettings, startProxy, stopProxy, verifyRemoteDockerEngine } from './handlers';

import type { OnlyId } from '../../../../types';
import type { CheckDestination, ListDestinations, NewDestination, Proxy, SaveDestinationSettings } from './types';

@@ -14,6 +14,7 @@ const root: FastifyPluginAsync = async (fastify): Promise<void> => {
fastify.get<OnlyId>('/:id', async (request) => await getDestination(request));
fastify.post<NewDestination>('/:id', async (request, reply) => await newDestination(request, reply));
fastify.delete<OnlyId>('/:id', async (request) => await deleteDestination(request));
fastify.delete<OnlyId>('/:id/force', async (request) => await forceDeleteDestination(request));
fastify.get<OnlyId>('/:id/status', async (request) => await getDestinationStatus(request));

fastify.post<SaveDestinationSettings>('/:id/settings', async (request) => await saveDestinationSettings(request));

@@ -156,14 +156,21 @@ export async function update(request: FastifyRequest<Update>) {
try {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
let image = `ghcr.io/coollabsio/coolify:${latestVersion}`;
try {
await executeCommand({ command: `docker pull ${image}` });
} catch (error) {
image = `coollabsio/coolify:${latestVersion}`;
await executeCommand({ command: `docker pull ${image}` });
}

await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
await executeCommand({
command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db ${image} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
return {};
} else {

@@ -412,22 +412,23 @@ export async function saveServiceType(
if (foundTemplate.variables) {
if (foundTemplate.variables.length > 0) {
for (const variable of foundTemplate.variables) {
const { defaultValue } = variable;
let { defaultValue } = variable;
defaultValue = defaultValue.toString();
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec(defaultValue)?.[1]) || undefined;
if (variable.defaultValue.startsWith('$$generate_password')) {
if (defaultValue.startsWith('$$generate_password')) {
variable.value = generatePassword({ length });
} else if (variable.defaultValue.startsWith('$$generate_hex')) {
} else if (defaultValue.startsWith('$$generate_hex')) {
variable.value = generatePassword({ length, isHex: true });
} else if (variable.defaultValue.startsWith('$$generate_username')) {
} else if (defaultValue.startsWith('$$generate_username')) {
variable.value = cuid();
} else if (variable.defaultValue.startsWith('$$generate_token')) {
} else if (defaultValue.startsWith('$$generate_token')) {
variable.value = generateToken();
} else {
variable.value = variable.defaultValue || '';
variable.value = defaultValue || '';
}
const foundVariableSomewhereElse = foundTemplate.variables.find((v) =>
v.defaultValue.includes(variable.id)
v.defaultValue.toString().includes(variable.id)
);
if (foundVariableSomewhereElse) {
foundVariableSomewhereElse.value = foundVariableSomewhereElse.value.replaceAll(
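For reference, the regex kept in this hunk extracts an optional length suffix from the template's `$$generate_*` default values. A short, self-contained illustration (the input string is a made-up example):

```ts
// '$$generate_password(32)' → capture group 1 is '32'; no suffix → undefined.
const regex = /^\$\$.*\((\d+)\)$/g;
const length = Number(regex.exec('$$generate_password(32)')?.[1]) || undefined;
console.log(length); // 32
```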

@@ -746,7 +747,10 @@ export async function saveService(request: FastifyRequest<SaveService>, reply: F
let { id: settingId, name, value, changed = false, isNew = false, variableName } = setting;
if (value) {
if (changed) {
await prisma.serviceSetting.update({ where: { id: settingId }, data: { value: value.replace(/\n/, "\\n") } });
await prisma.serviceSetting.update({
where: { id: settingId },
data: { value: value.replace(/\n/, '\\n') }
});
}
if (isNew) {
if (!variableName) {

@@ -1101,14 +1105,17 @@ export async function activateWordpressFtp(
shell: true
});
}
} catch (error) { }
} catch (error) {}
const volumes = [
`${id}-wordpress-data:/home/${ftpUser}/wordpress`,
`${isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
`${
isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
}/${id}.ed25519:/etc/ssh/ssh_host_ed25519_key`,
`${isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
`${
isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
}/${id}.rsa:/etc/ssh/ssh_host_rsa_key`,
`${isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
`${
isDev ? hostkeyDir : '/var/lib/docker/volumes/coolify-ssl-certs/_data/hostkeys'
}/${id}.sh:/etc/sftp.d/chmod.sh`
];

@@ -1178,6 +1185,6 @@ export async function activateWordpressFtp(
await executeCommand({
command: `rm -fr ${hostkeyDir}/${id}-docker-compose.yml ${hostkeyDir}/${id}.ed25519 ${hostkeyDir}/${id}.ed25519.pub ${hostkeyDir}/${id}.rsa ${hostkeyDir}/${id}.rsa.pub ${hostkeyDir}/${id}.sh`
});
} catch (error) { }
} catch (error) {}
}
}
|
||||
@@ -1,191 +1,231 @@
|
||||
import cuid from 'cuid';
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { FastifyReply } from 'fastify';
|
||||
import { decrypt, encrypt, errorHandler, prisma } from '../../../../lib/common';
|
||||
import { OnlyId } from '../../../../types';
|
||||
import { CheckGitLabOAuthId, SaveGitHubSource, SaveGitLabSource } from './types';
|
||||
|
||||
export async function listSources(request: FastifyRequest) {
|
||||
try {
|
||||
const teamId = request.user?.teamId;
|
||||
const sources = await prisma.gitSource.findMany({
|
||||
where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
|
||||
include: { teams: true, githubApp: true, gitlabApp: true }
|
||||
});
|
||||
return {
|
||||
sources
|
||||
}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const teamId = request.user?.teamId;
|
||||
const sources = await prisma.gitSource.findMany({
|
||||
where: {
|
||||
OR: [
|
||||
{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
{ isSystemWide: true }
|
||||
]
|
||||
},
|
||||
include: { teams: true, githubApp: true, gitlabApp: true }
|
||||
});
|
||||
return {
|
||||
sources
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function saveSource(request, reply) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
let { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = request.body
|
||||
if (customPort) customPort = Number(customPort)
|
||||
await prisma.gitSource.update({
|
||||
where: { id },
|
||||
data: { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide }
|
||||
});
|
||||
return reply.code(201).send()
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
try {
|
||||
const { id } = request.params;
|
||||
let { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = request.body;
|
||||
if (customPort) customPort = Number(customPort);
|
||||
await prisma.gitSource.update({
|
||||
where: { id },
|
||||
data: { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide }
|
||||
});
|
||||
return reply.code(201).send();
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function getSource(request: FastifyRequest<OnlyId>) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const { teamId } = request.user
|
||||
const settings = await prisma.setting.findFirst({});
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const { teamId } = request.user;
|
||||
const settings = await prisma.setting.findFirst({});
|
||||
|
||||
if (id === 'new') {
|
||||
return {
|
||||
source: {
|
||||
name: null,
|
||||
type: null,
|
||||
htmlUrl: null,
|
||||
apiUrl: null,
|
||||
organization: null,
|
||||
customPort: 22,
|
||||
customUser: 'git',
|
||||
},
|
||||
settings
|
||||
}
|
||||
}
|
||||
if (id === 'new') {
|
||||
return {
|
||||
source: {
|
||||
name: null,
|
||||
type: null,
|
||||
htmlUrl: null,
|
||||
apiUrl: null,
|
||||
organization: null,
|
||||
customPort: 22,
|
||||
customUser: 'git'
|
||||
},
|
||||
settings
|
||||
};
|
||||
}
|
||||
|
||||
const source = await prisma.gitSource.findFirst({
|
||||
where: { id, OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (!source) {
|
||||
throw { status: 404, message: 'Source not found.' }
|
||||
}
|
||||
const source = await prisma.gitSource.findFirst({
|
||||
where: {
|
||||
id,
|
||||
OR: [
|
||||
{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
|
||||
{ isSystemWide: true }
|
||||
]
|
||||
},
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (!source) {
|
||||
throw { status: 404, message: 'Source not found.' };
|
||||
}
|
||||
|
||||
if (source?.githubApp?.clientSecret)
|
||||
source.githubApp.clientSecret = decrypt(source.githubApp.clientSecret);
|
||||
if (source?.githubApp?.webhookSecret)
|
||||
source.githubApp.webhookSecret = decrypt(source.githubApp.webhookSecret);
|
||||
if (source?.githubApp?.privateKey) source.githubApp.privateKey = decrypt(source.githubApp.privateKey);
|
||||
if (source?.gitlabApp?.appSecret) source.gitlabApp.appSecret = decrypt(source.gitlabApp.appSecret);
|
||||
if (source?.githubApp?.clientSecret)
|
||||
source.githubApp.clientSecret = decrypt(source.githubApp.clientSecret);
|
||||
if (source?.githubApp?.webhookSecret)
|
||||
source.githubApp.webhookSecret = decrypt(source.githubApp.webhookSecret);
|
||||
if (source?.githubApp?.privateKey)
|
||||
source.githubApp.privateKey = decrypt(source.githubApp.privateKey);
|
||||
if (source?.gitlabApp?.appSecret)
|
||||
source.gitlabApp.appSecret = decrypt(source.gitlabApp.appSecret);
|
||||
|
||||
return {
|
||||
source,
|
||||
settings
|
||||
};
|
||||
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
return {
|
||||
source,
|
||||
settings
|
||||
};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteSource(request) {
|
||||
try {
|
||||
const { id } = request.params
|
||||
const source = await prisma.gitSource.delete({
|
||||
where: { id },
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (source.githubAppId) {
|
||||
await prisma.githubApp.delete({ where: { id: source.githubAppId } });
|
||||
}
|
||||
if (source.gitlabAppId) {
|
||||
await prisma.gitlabApp.delete({ where: { id: source.gitlabAppId } });
|
||||
}
|
||||
return {}
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message })
|
||||
}
|
||||
|
||||
try {
|
||||
const { id } = request.params;
|
||||
const gitAppFound = await prisma.application.findFirst({ where: { gitSourceId: id } });
|
||||
if (gitAppFound) {
|
||||
throw {
|
||||
status: 400,
|
||||
message: 'This source is used by an application. Please remove the application first.'
|
||||
};
|
||||
}
|
||||
const source = await prisma.gitSource.delete({
|
||||
where: { id },
|
||||
include: { githubApp: true, gitlabApp: true }
|
||||
});
|
||||
if (source.githubAppId) {
|
||||
await prisma.githubApp.delete({ where: { id: source.githubAppId } });
|
||||
}
|
||||
if (source.gitlabAppId) {
|
||||
await prisma.gitlabApp.delete({ where: { id: source.gitlabAppId } });
|
||||
}
|
||||
return {};
|
||||
} catch ({ status, message }) {
|
||||
return errorHandler({ status, message });
|
||||
}
|
||||
}
|
||||
export async function saveGitHubSource(request: FastifyRequest<SaveGitHubSource>) {
	try {
		const { teamId } = request.user;
		const { id } = request.params;
		let { name, htmlUrl, apiUrl, organization, customPort, isSystemWide } = request.body;

		if (customPort) customPort = Number(customPort);
		if (id === 'new') {
			const newId = cuid();
			await prisma.gitSource.create({
				data: {
					id: newId,
					name,
					htmlUrl,
					apiUrl,
					organization,
					customPort,
					isSystemWide,
					type: 'github',
					teams: { connect: { id: teamId } }
				}
			});
			return {
				id: newId
			};
		}
		throw { status: 500, message: 'Wrong request.' };
	} catch ({ status, message }) {
		return errorHandler({ status, message });
	}
}
export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>) {
	try {
		const { id } = request.params;
		const { teamId } = request.user;
		let {
			type,
			name,
			htmlUrl,
			apiUrl,
			oauthId,
			appId,
			appSecret,
			groupName,
			customPort,
			customUser
		} = request.body;

		if (oauthId) oauthId = Number(oauthId);
		if (customPort) customPort = Number(customPort);
		const encryptedAppSecret = encrypt(appSecret);

		if (id === 'new') {
			const newId = cuid();
			await prisma.gitSource.create({
				data: {
					id: newId,
					type,
					apiUrl,
					htmlUrl,
					name,
					customPort,
					customUser,
					teams: { connect: { id: teamId } }
				}
			});
			await prisma.gitlabApp.create({
				data: {
					teams: { connect: { id: teamId } },
					appId,
					oauthId,
					groupName,
					appSecret: encryptedAppSecret,
					gitSource: { connect: { id: newId } }
				}
			});
			return {
				status: 201,
				id: newId
			};
		} else {
			await prisma.gitSource.update({
				where: { id },
				data: { type, apiUrl, htmlUrl, name, customPort, customUser }
			});
			await prisma.gitlabApp.update({
				where: { id },
				data: {
					appId,
					oauthId,
					groupName,
					appSecret: encryptedAppSecret
				}
			});
		}
		return { status: 201 };
	} catch ({ status, message }) {
		return errorHandler({ status, message });
	}
}
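For orientation, the request body destructured by saveGitLabSource has roughly this shape (field names taken from the destructuring above; the actual SaveGitLabSource route generic in the repo may declare the types differently):

interface SaveGitLabSourceBody {
	type: string;
	name: string;
	htmlUrl: string;
	apiUrl: string;
	oauthId: number | string;
	appId: string;
	appSecret: string;
	groupName: string;
	customPort?: number | string;
	customUser?: string;
}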
export async function checkGitLabOAuthID(request: FastifyRequest<CheckGitLabOAuthId>) {
	try {
		const { oauthId } = request.body;
		const found = await prisma.gitlabApp.findFirst({ where: { oauthId: Number(oauthId) } });
		if (found) {
			throw { status: 500, message: 'OAuthID already configured in Coolify.' };
		}
		return {};
	} catch ({ status, message }) {
		return errorHandler({ status, message });
	}
}
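The route wiring for these handlers is not part of this diff; as a hedged sketch (paths and plugin name are assumptions of mine, not taken from the repo), registering them on a Fastify instance could look like:

import { FastifyPluginAsync } from 'fastify';

const sourcesRoutes: FastifyPluginAsync = async (fastify) => {
	// Illustrative paths only; Coolify's real route definitions live elsewhere in the API package.
	fastify.delete('/:id', async (request) => await deleteSource(request));
	fastify.post<SaveGitHubSource>('/:id/github', async (request) => await saveGitHubSource(request));
	fastify.post<SaveGitLabSource>('/:id/gitlab', async (request) => await saveGitLabSource(request));
	fastify.post<CheckGitLabOAuthId>('/gitlab/check', async (request) => await checkGitLabOAuthID(request));
};

export default sourcesRoutes;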
@@ -543,6 +543,9 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
	const template: any = await parseAndFindServiceTemplates(service, null, true);
	const { proxy } = template.services[oneService] || found.services[oneService];
	for (let configuration of proxy) {
		if (configuration.hostPort) {
			continue;
		}
		if (configuration.domain) {
			const setting = serviceSetting.find(
				(a) => a.variableName === configuration.domain

File diff suppressed because one or more lines are too long
@@ -12,6 +12,7 @@
	import { errorNotification } from '$lib/common';
	import { addToast } from '$lib/store';
	import CopyVolumeField from '$lib/components/CopyVolumeField.svelte';
	import SimpleExplainer from '$lib/components/SimpleExplainer.svelte';
	const { id } = $page.params;
	let isHttps = browser && window.location.protocol === 'https:';
	export let value: string;
@@ -33,11 +34,13 @@
	storage.path.replace(/\/\//g, '/');
	await post(`/applications/${id}/storages`, {
		path: storage.path,
		hostPath: storage.hostPath,
		storageId: storage.id,
		newStorage
	});
	dispatch('refresh');
	if (isNew) {
		storage.hostPath = null;
		storage.path = null;
		storage.id = null;
	}
@@ -80,27 +83,42 @@
<div class="flex gap-4 pb-2" class:pt-8={isNew}>
	{#if storage.applicationId}
		{#if storage.oldPath}
			<CopyVolumeField
				value="{storage.applicationId}{storage.path.replace(/\//gi, '-').replace('-app', '')}"
			/>
		{:else if !storage.hostPath}
			<CopyVolumeField
				value="{storage.applicationId}{storage.path.replace(/\//gi, '-').replace('-app', '')}"
			/>
		{:else}
			<CopyVolumeField
				value="{storage.applicationId}{storage.path.replace(/\//gi, '-').replace('-app', '')}"
			/>
		{/if}
	{/if}

	{#if isNew}
		<div class="w-full">
			<input
				disabled={!isNew}
				readonly={!isNew}
				bind:value={storage.hostPath}
				placeholder="Host path, example: ~/.directory"
			/>
			<SimpleExplainer
				text="You can mount <span class='text-yellow-400 font-bold'>host paths</span> from the operating system.<br>Leave it empty to define a volume based volume."
			/>
		</div>
	{:else if storage.hostPath}
		<input disabled readonly value={storage.hostPath} />
	{/if}
	<input
		disabled={!isNew}
		readonly={!isNew}
		class="w-full"
		bind:value={storage.path}
		required
		placeholder="Mount point inside the container, example: /data"
	/>
	<div class="flex items-start justify-center">
		{#if isNew}
			<div class="w-full lg:w-64">
				<button class="btn btn-sm btn-primary w-full" on:click={() => saveStorage(true)}
@@ -427,29 +427,6 @@
	</svg> Stop
	</button>
{:else if $isDeploymentEnabled && !$page.url.pathname.startsWith(`/applications/${id}/configuration/`)}
	{#if $status.application.overallStatus === 'degraded'}
		<button
			on:click={stopApplication}
			type="submit"
			disabled={!$isDeploymentEnabled || !$appSession.isAdmin}
			class="btn btn-sm gap-2"
		>
			<svg
				xmlns="http://www.w3.org/2000/svg"
				class="w-6 h-6 text-error"
				viewBox="0 0 24 24"
				stroke-width="1.5"
				stroke="currentColor"
				fill="none"
				stroke-linecap="round"
				stroke-linejoin="round"
			>
				<path stroke="none" d="M0 0h24v24H0z" fill="none" />
				<rect x="6" y="5" width="4" height="14" rx="1" />
				<rect x="14" y="5" width="4" height="14" rx="1" />
			</svg> Stop
		</button>
	{/if}
	<button
		class="btn btn-sm gap-2"
		disabled={!$isDeploymentEnabled || !$appSession.isAdmin}
@@ -493,6 +470,29 @@
				: 'Redeploy Stack'
			: 'Deploy'}
	</button>
	{#if $status.application.overallStatus === 'degraded'}
		<button
			on:click={stopApplication}
			type="submit"
			disabled={!$isDeploymentEnabled || !$appSession.isAdmin}
			class="btn btn-sm gap-2"
		>
			<svg
				xmlns="http://www.w3.org/2000/svg"
				class="w-6 h-6 text-error"
				viewBox="0 0 24 24"
				stroke-width="1.5"
				stroke="currentColor"
				fill="none"
				stroke-linecap="round"
				stroke-linejoin="round"
			>
				<path stroke="none" d="M0 0h24v24H0z" fill="none" />
				<rect x="6" y="5" width="4" height="14" rx="1" />
				<rect x="14" y="5" width="4" height="14" rx="1" />
			</svg> Stop
		</button>
	{/if}
{/if}
{#if $location && $status.application.overallStatus === 'healthy'}
	<a href={$location} target="_blank noreferrer" class="btn btn-sm gap-2 text-sm bg-primary"
@@ -406,7 +406,7 @@
	>
	{#if tryAgain}
		<div class="p-5">
			An error occured during authenticating with GitLab. Please check your GitLab Source
			An error occurred during authenticating with GitLab. Please check your GitLab Source
			configuration <a href={`/sources/${application.gitSource.id}`}>here.</a>
		</div>
		<button
@@ -51,16 +51,22 @@
	async function loadLogs() {
		if (logsLoading) return;
		try {
			const newLogs: any = await get(
				`/applications/${id}/logs/${selectedService}?since=${lastLog?.split(' ')[0] || 0}`
			);
			const since = lastLog?.split(' ')[0] || 0;
			const newLogs: any = await get(`/applications/${id}/logs/${selectedService}?since=${since}`);
			if (newLogs.noContainer) {
				noContainer = true;
			} else {
				noContainer = false;
			}
			if (newLogs?.logs && newLogs.logs[newLogs.logs.length - 1] !== logs[logs.length - 1]) {
				logs = logs.concat(newLogs.logs);
				if (since === 0) {
					logs = logs.concat(newLogs.logs);
				} else {
					const newParsedLogs = newLogs.logs.filter((log: any) => {
						return log !== logs[logs.length - 1];
					});
					logs = logs.concat(newParsedLogs);
				}
				lastLog = newLogs.logs[newLogs.logs.length - 1];
			}
		} catch (error) {
@@ -135,7 +141,7 @@
	{:else}
		<div class="relative w-full">
			<div class="flex justify-start sticky space-x-2 pb-2">
				<button on:click={followBuild} class="btn btn-sm " class:bg-coollabs={followingLogs}>
				<button on:click={followBuild} class="btn btn-sm" class:bg-coollabs={followingLogs}>
					<svg
						xmlns="http://www.w3.org/2000/svg"
						class="w-6 h-6 mr-2"
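The first hunk above changes how newly polled log lines are merged: on the very first poll (since === 0) the whole batch is appended, and afterwards lines equal to the last already-rendered line are filtered out before appending. A small standalone sketch of that merge rule (names are illustrative, not the component's):

// Illustrative only: mirrors the merge rule from the diff above on plain string arrays.
function mergeLogs(existing: string[], incoming: string[], since: number): string[] {
	if (incoming.length === 0) return existing;
	if (since === 0) return existing.concat(incoming);
	const last = existing[existing.length - 1];
	return existing.concat(incoming.filter((line) => line !== last));
}

// mergeLogs(['a', 'b'], ['b', 'c'], 123) -> ['a', 'b', 'c']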
@@ -158,7 +158,7 @@
	id="dockerImage"
	name="dockerImage"
	required
	placeholder="coollabsio/coolify:0.0.1"
	placeholder="ghcr.io/coollabsio/coolify:0.0.1"
	bind:value={remoteImage}
/>
<button class="btn btn-sm btn-primary" type="submit">Revert Now</button>
@@ -35,17 +35,46 @@
for (const [_, service] of Object.entries(composeJson.services)) {
	if (service?.volumes) {
		for (const [_, volumeName] of Object.entries(service.volumes)) {
			if (typeof volumeName === 'string') {
				let [volume, target] = volumeName.split(':');
				if (
					volume.startsWith('.') ||
					volume.startsWith('..') ||
					volume.startsWith('/') ||
					volume.startsWith('~') ||
					volume.startsWith('$PWD')
				) {
					volume = volume.replace(/^\./, `~`).replace(/^\.\./, '~').replace(/^\$PWD/, '~');
				} else {
					if (!target) {
						target = volume;
						volume = `${application.id}${volume.replace(/\//gi, '-').replace(/\./gi, '')}`;
					} else {
						volume = `${application.id}${volume.replace(/\//gi, '-').replace(/\./gi, '')}`;
					}
				}
				predefinedVolumes.push({ id: volume, path: target, predefined: true });
			}
			if (typeof volumeName === 'object') {
				let { source, target } = volumeName;
				if (
					source.startsWith('.') ||
					source.startsWith('..') ||
					source.startsWith('/') ||
					source.startsWith('~') ||
					source.startsWith('$PWD')
				) {
					source = source.replace(/^\./, `~`).replace(/^\.\./, '~').replace(/^\$PWD/, '~');
				} else {
					if (!target) {
						target = source;
						source = `${application.id}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
					} else {
						source = `${application.id}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
					}
				}
				predefinedVolumes.push({ id: source, path: target, predefined: true });
			}
		}
	}
}
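To make the normalization above concrete, here are a few hypothetical compose volume entries and the id/path pairs they would produce, assuming application.id is 'app1' (these examples are mine, not part of the diff):

// Illustrative expectations for the string branch above, with application.id === 'app1':
//   './config:/etc/app'      -> { id: '~/config', path: '/etc/app' }          (host path, '.' rewritten to '~')
//   '$PWD/data:/data'        -> { id: '~/data', path: '/data' }
//   'db-data:/var/lib/mysql' -> { id: 'app1db-data', path: '/var/lib/mysql' } (named volume, prefixed with the app id)
//   'cache'                  -> { id: 'app1cache', path: 'cache' }            (no target, the source doubles as the path)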
@@ -88,14 +117,14 @@
	{/key}
{/each}
{#if $appSession.isAdmin}
	<div class:pt-10={predefinedVolumes.length > 0}>
		Add New Volume <Explainer
			position="dropdown-bottom"
			explanation={$t('application.storage.persistent_storage_explainer')}
		/>
	</div>

	<Storage on:refresh={refreshStorage} isNew />
{/if}
</div>
</div>
@@ -49,23 +49,23 @@
		databaseDbUser = '';
	}
}
function ipAddress() {
	if ($status.database.isPublic) {
		if (database.destinationDocker.remoteEngine) {
			return database.destinationDocker.remoteIpAddress;
		}
		if ($appSession.ipv6) {
			return $appSession.ipv6;
		}
		if ($appSession.ipv4) {
			return $appSession.ipv4;
		}
		return '<Cannot determine public IP address>';
	} else {
		return database.id;
	}
}
function generateUrl() {
	const user = () => {
		if (databaseDbUser) {
			return databaseDbUser + ':';
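generateUrl is only partly visible in this hunk; as a rough, hedged sketch of the kind of string it assembles from these helpers (every name besides ipAddress() and user() is a placeholder of mine, not from the diff):

// Hypothetical shape only: scheme, password, port and dbName stand in for values the component holds elsewhere.
const url = `${scheme}://${user()}${password}@${ipAddress()}:${port}/${dbName}`;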
@@ -183,16 +183,38 @@
	class:cursor-pointer={!$status.database.isRunning}
	/></a
>
{#if $status.database.isPublic}
	<label for="internalHost">Internal Host</label>
	<CopyPasswordField
		isPasswordField={false}
		readonly
		disabled
		id="internalHost"
		name="internalHost"
		value={database.id}
	/>
	<label for="host">Public Host</label>
	<CopyPasswordField
		placeholder={$t('forms.generated_automatically_after_start')}
		isPasswordField={false}
		readonly
		disabled
		id="host"
		name="host"
		value={loading.public ? 'Loading...' : ipAddress()}
	/>
{:else}
	<label for="internalHost">Host</label>
	<CopyPasswordField
		isPasswordField={false}
		readonly
		disabled
		id="internalHost"
		name="internalHost"
		value={database.id}
	/>
{/if}

<label for="publicPort">{$t('forms.port')}</label>
<CopyPasswordField
	placeholder={$t('database.generated_automatically_after_set_to_public')}
@@ -75,6 +75,25 @@
		}
	}
}
async function forceDeleteDestination(destination: any) {
	let sure = confirm($t('application.confirm_to_delete', { name: destination.name }));
	if (sure) {
		sure = confirm(
			'Are you REALLY sure? This will delete all resources associated with this destination, but not on the destination (server) itself. You will have to manually delete everything on the server afterwards.'
		);
		if (sure) {
			sure = confirm('REALLY?');
			if (sure) {
				try {
					await del(`/destinations/${destination.id}/force`, { id: destination.id });
					return await goto('/', { replaceState: true });
				} catch (error) {
					return errorNotification(error);
				}
			}
		}
	}
}
function deletable() {
	if (!isDestinationDeletable) {
		return 'Please delete all resources before deleting this.';
@@ -88,7 +107,7 @@
</script>

{#if $page.params.id !== 'new'}
	<nav class="header lg:flex-row flex-col-reverse">
	<nav class="header lg:flex-row flex-col-reverse gap-2">
		<div class="flex flex-row space-x-2 font-bold pt-10 lg:pt-0">
			<div class="flex flex-col items-center justify-center title">
				{#if $page.url.pathname === `/destinations/${$page.params.id}`}
@@ -111,6 +130,16 @@
		>
		<Tooltip triggeredBy="#delete">{deletable()}</Tooltip>
	</div>
	<div class="flex flex-row flex-wrap justify-center lg:justify-start lg:py-0 items-center">
		<button
			id="forceDelete"
			on:click={() => forceDeleteDestination(destination)}
			type="submit"
			disabled={!$appSession.isAdmin && isDestinationDeletable}
			class="icons bg-transparent text-sm text-red-500"><DeleteIcon /></button
		>
		<Tooltip triggeredBy="#forceDelete">Force Delete</Tooltip>
	</div>
</nav>
{/if}
<slot />
@@ -86,7 +86,7 @@
	readonly
	class="w-full"
	value={`${
		services.find((s) => s.id === storage.containerId).name || storage.containerId
		services.find((s) => s.id === storage.containerId)?.name || storage.containerId
	}`}
/>
</div>
@@ -111,19 +111,18 @@
	name="containerId"
	class="w-full lg:w-64"
	disabled={storage.predefined}
	readonly={storage.predefined}
	bind:value={storage.containerId}
>
	{#if services.length === 1}
		{#if services[0].name}
			<option selected value={services[0].id}>{services[0].name}</option>
			<option selected value={services[0].id}>{services[0]?.name}</option>
		{:else}
			<option selected value={services[0]}>{services[0]}</option>
		{/if}
	{:else}
		{#each services as service}
			{#if service.name}
				<option value={service.id}>{service.name}</option>
				<option value={service.id}>{service?.name}</option>
			{:else}
				<option value={service}>{service}</option>
			{/if}
@@ -157,7 +156,7 @@
	disabled
	readonly
	class="w-full"
	value={`${services.find((s) => s.id === storage.containerId).name || storage.containerId}`}
	value={`${services.find((s) => s.id === storage.containerId)?.name || storage.containerId}`}
/>
<input disabled readonly class="w-full" value={`${storage.volumeName}:${storage.path}`} />
<button
@@ -43,6 +43,7 @@
		return true;
	}
});
let customVersion: string;
$: isDisabled =
	!$appSession.isAdmin ||
	$status.service.overallStatus === 'degraded' ||
@@ -111,6 +112,7 @@
	setLocation(service);
	forceSave = false;
	$isDeploymentEnabled = checkIfDeploymentEnabledServices(service);
	customVersion = null;
	return addToast({
		message: 'Configuration saved.',
		type: 'success'
@@ -196,26 +198,12 @@
	async function selectTag(event: any) {
		service.version = event.detail.value;
	}
	async function setCustomVersion() {
		service.version = customVersion;
	}
	onMount(async () => {
		if (browser && window.location.hostname === 'demo.coolify.io' && !service.fqdn) {
			service.fqdn = `http://${cuid()}.demo.coolify.io`;
			// if (service.type === 'wordpress') {
			// 	service.wordpress.mysqlDatabase = 'db';
			// }
			// if (service.type === 'plausibleanalytics') {
			// 	service.plausibleAnalytics.email = 'noreply@demo.com';
			// 	service.plausibleAnalytics.username = 'admin';
			// }
			// if (service.type === 'minio') {
			// 	service.minio.apiFqdn = `http://${cuid()}.demo.coolify.io`;
			// }
			// if (service.type === 'ghost') {
			// 	service.ghost.mariadbDatabase = 'db';
			// }
			// if (service.type === 'fider') {
			// 	service.fider.emailNoreply = 'noreply@demo.com';
			// }
			// await handleSubmit();
		}
	});
</script>
@@ -224,7 +212,7 @@
<form id="saveForm" on:submit|preventDefault={handleSubmit}>
	<div class="mx-auto w-full">
		<div class="flex flex-row border-b border-coolgray-500 mb-6 space-x-2">
			<div class="title font-bold pb-3 ">General</div>
			<div class="title font-bold pb-3">General</div>
			{#if $appSession.isAdmin}
				<button
					type="submit"
@@ -289,6 +277,7 @@
	</div>
	<div class="grid grid-cols-2 items-center">
		<label for="version">Version / Tag</label>
		<div class="flex gap-2">
			{#if tags.tags?.length > 0}
				<div class="custom-select-wrapper w-full">
					<Select
@@ -303,9 +292,11 @@
						isClearable={false}
					/>
				</div>
			{:else}
				<input class="w-full border-red-500" disabled placeholder="Error getting tags..." />
			{/if}
			<input class="w-full" placeholder="Custom version" on:change={setCustomVersion} bind:value={customVersion} />
		</div>
	</div>

	<div class="grid grid-cols-2 items-center">
@@ -34,7 +34,7 @@ services:
    networks:
      - coolify-infra
  fluent-bit:
    image: coollabsio/coolify-fluent-bit:1.0.0
    image: ghcr.io/coollabsio/fluent-bit:1.0.0
    command: /fluent-bit/bin/fluent-bit -c /fluent-bit/etc/fluent-bit-dev.conf
    container_name: coolify-fluentbit
    volumes:
@@ -2,7 +2,7 @@ version: '3.8'

services:
  coolify:
    image: coollabsio/coolify:${TAG:-latest}
    image: ghcr.io/coollabsio/coolify:${TAG:-latest}
    restart: always
    container_name: coolify
    ports:
@@ -23,7 +23,7 @@ services:
    networks:
      - coolify-infra
  fluent-bit:
    image: coollabsio/coolify-fluent-bit:1.0.0
    image: ghcr.io/coollabsio/fluent-bit:1.0.0
    container_name: coolify-fluentbit
    volumes:
      - 'coolify-logs:/app/logs'
@@ -1,4 +0,0 @@
FROM fluent/fluent-bit:1.9.8
COPY ./fluent-bit.conf /fluent-bit/etc/fluent-bit.conf
COPY ./fluent-bit-dev.conf /fluent-bit/etc/fluent-bit-dev.conf
COPY ./parsers.conf /fluent-bit/etc/parsers.conf
@@ -1,30 +0,0 @@
[SERVICE]
    Parsers_file /fluent-bit/etc/parsers.conf
    Flush 1
    Grace 30
[INPUT]
    Name http
    Host 0.0.0.0
    Port 24224
[FILTER]
    Name parser
    Match *
    Key_Name log
    Parser jsonparser
    Reserve_Data True
[OUTPUT]
    Name file
    Match *
    Path /logs
    Mkdir true
    Format csv
# [OUTPUT]
#     Name influxdb
#     match *
#     Host coolify-influxdb
#     Port 8086
#     Database coolify
#     Bucket coolify
#     Org coolify
#     HTTP_Token 12345678
#     Sequence_Tag _seq
@@ -1,30 +0,0 @@
[SERVICE]
    Parsers_file /fluent-bit/etc/parsers.conf
    Flush 1
    Grace 30
[INPUT]
    Name http
    Host 0.0.0.0
    Port 24224
[FILTER]
    Name parser
    Match *
    Key_Name log
    Parser jsonparser
    Reserve_Data True
[OUTPUT]
    Name file
    Match *
    Path /app/logs
    Mkdir true
    Format csv
# [OUTPUT]
#     Name influxdb
#     match *
#     Host coolify-influxdb
#     Port 8086
#     Database coolify
#     Bucket coolify
#     Org coolify
#     HTTP_Token 12345678
#     Sequence_Tag _seq
@@ -1,6 +0,0 @@
[PARSER]
    Name jsonparser
    Format json
    Time_Key time
    Time_Format %Y-%m-%dT%H:%M:%S.%L
    Time_Keep On
@@ -1,12 +0,0 @@
FROM alpine:3.17
ARG BUILDARCH
ARG PB_VERSION=0.12.3
RUN apk add --no-cache \
    unzip \
    ca-certificates

ADD https://github.com/pocketbase/pocketbase/releases/download/v${PB_VERSION}/pocketbase_${PB_VERSION}_linux_${BUILDARCH}.zip /tmp/pb.zip
RUN unzip /tmp/pb.zip -d /app/
RUN rm /tmp/pb.zip
EXPOSE 8080
CMD ["/app/pocketbase", "serve", "--http=0.0.0.0:8080"]
@@ -1,7 +1,7 @@
{
	"name": "coolify",
	"description": "An open-source & self-hostable Heroku / Netlify alternative.",
	"version": "3.12.20",
	"version": "3.12.32",
	"license": "Apache-2.0",
	"repository": "github:coollabsio/coolify",
	"scripts": {
@@ -32,7 +32,7 @@
	"build:api": "NODE_ENV=production pnpm run --filter api build",
	"build:ui": "NODE_ENV=production pnpm run --filter ui build",
	"dockerlogin": "echo $DOCKER_PASS | docker login --username=$DOCKER_USER --password-stdin",
	"release:staging:amd": "docker build -t coollabsio/coolify:next . && docker push coollabsio/coolify:next",
	"release:staging:amd": "docker build -t ghcr.io/coollabsio/coolify:next . && docker push ghcr.io/coollabsio/coolify:next",
	"release:local": "rm -fr ./local-serve && mkdir ./local-serve && pnpm build && cp -Rp apps/api/build/* ./local-serve && cp -Rp apps/ui/build/ ./local-serve/public && cp -Rp apps/api/prisma/ ./local-serve/prisma && cp -Rp apps/api/package.json ./local-serve && env | grep '^COOLIFY_' > ./local-serve/.env && cd ./local-serve && pnpm install . && pnpm start"
},
"devDependencies": {
@@ -50,4 +50,4 @@
	"open-source",
	"coolify"
]
}
pnpm-lock.yaml (generated): diff suppressed because it is too large.