Mirror of https://github.com/ershisan99/coolify.git (synced 2026-01-09 05:02:16 +00:00)

Compare commits
52 Commits
SHA1s:

e007a773fd, e2821118eb, 4c8e73ac86, cb980fb814, 41c84e3642, 2bad98424f,
bc6b1e2dea, 911c15d1be, f79d570870, 7fffa9fba5, cbd634fb99, 7ae7436d4f,
641bada100, 3416d8d88e, 0bb503368b, ac3a77c3c7, 79b4178d76, 42a61296d7,
e8088e2a70, c4d39aced2, b40a5adeb0, 558a900620, 6b5e5a504d, e44dca2464,
e1f84b277a, 2518f46b08, 01e18a9496, 564ca709d3, a54a36ae18, 43603b0961,
96cd99f904, 3438d10e25, 022ccb42a1, e6d72e9f87, 06e8a6af23, ac188d137a,
cae466745a, d61f16dab0, 02ba277a86, 470ff49a02, 04d741581d, 038f210148,
2adad3a7bd, 05fb26a49b, 1c237affb4, 3e81d7e9cb, edb66620c1, 04f7e8e777,
eee201013c, 1190cb4ea1, 507100ea0b, 9b13912b6d
@@ -1,12 +1,9 @@
-name: fluent-bit-release
+name: Production Release to DockerHub
 
 on:
   push:
-    paths:
-      - "others/fluentbit"
-      - ".github/workflows/fluent-bit-release.yml"
-    branches:
-      - next
+    branches:
+      - "this-branch-does-not-exists"
 
 jobs:
   arm64:
@@ -23,13 +20,18 @@ jobs:
         with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
       - name: Build and push
         uses: docker/build-push-action@v2
         with:
-          context: others/fluentbit/
+          context: .
          platforms: linux/arm64
          push: true
-          tags: coollabsio/coolify-fluent-bit:1.0.0-arm64
+          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
   amd64:
     runs-on: ubuntu-latest
     steps:
@@ -44,13 +46,18 @@ jobs:
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
       - name: Build and push
         uses: docker/build-push-action@v3
         with:
-          context: others/fluentbit/
+          context: .
          platforms: linux/amd64
          push: true
-          tags: coollabsio/coolify-fluent-bit:1.0.0-amd64
+          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
   aarch64:
     runs-on: [self-hosted, arm64]
     steps:
@@ -65,13 +72,18 @@ jobs:
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
       - name: Build and push
         uses: docker/build-push-action@v2
         with:
-          context: others/fluentbit/
+          context: .
          platforms: linux/aarch64
          push: true
-          tags: coollabsio/coolify-fluent-bit:1.0.0-aarch64
+          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max
   merge-manifest:
     runs-on: ubuntu-latest
     needs: [amd64, arm64, aarch64]
@@ -87,7 +99,14 @@ jobs:
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
       - name: Create & publish manifest
         run: |
-          docker manifest create coollabsio/coolify-fluent-bit:1.0.0 --amend coollabsio/coolify-fluent-bit:1.0.0-amd64 --amend coollabsio/coolify-fluent-bit:1.0.0-arm64 --amend coollabsio/coolify-fluent-bit:1.0.0-aarch64
-          docker manifest push coollabsio/coolify-fluent-bit:1.0.0
+          docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}}
+          docker buildx imagetools create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --tag coollabsio/coolify:latest
+      - uses: sarisia/actions-status-discord@v1
+        if: always()
+        with:
+          webhook: ${{ secrets.DISCORD_WEBHOOK_PROD_RELEASE_CHANNEL }}
.github/workflows/production-release.yml (vendored): 103 changed lines

@@ -1,36 +1,14 @@
-name: production-release
+name: Production Release to ghcr.io
 
 on:
   release:
     types: [released]
 
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: "coollabsio/coolify"
+
 jobs:
-  arm64:
-    runs-on: [self-hosted, arm64]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
-      - name: Build and push
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          platforms: linux/arm64
-          push: true
-          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max
   amd64:
     runs-on: ubuntu-latest
     steps:
@@ -40,23 +18,27 @@ jobs:
        uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
+      - name: Login to ghcr.io
         uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=semver,pattern={{version}}
       - name: Build and push
         uses: docker/build-push-action@v3
         with:
          context: .
          platforms: linux/amd64
          push: true
-          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
   aarch64:
     runs-on: [self-hosted, arm64]
     steps:
@@ -66,26 +48,30 @@ jobs:
        uses: docker/setup-qemu-action@v1
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v1
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
+      - name: Login to ghcr.io
+        uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=semver,pattern={{version}}-aarch64
       - name: Build and push
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
         with:
          context: .
          platforms: linux/aarch64
          push: true
-          tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max
+          tags: ${{ steps.meta.outputs.tags }}-aarch64
+          labels: ${{ steps.meta.outputs.labels }}
   merge-manifest:
     runs-on: ubuntu-latest
-    needs: [amd64, arm64, aarch64]
+    needs: [amd64, aarch64]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -93,18 +79,23 @@ jobs:
        uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
+      - name: Login to ghcr.io
         uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=semver,pattern={{version}}
       - name: Create & publish manifest
         run: |
-          docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}}
-          docker buildx imagetools create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --tag coollabsio/coolify:latest
+          docker buildx imagetools create --append ${{ fromJSON(steps.meta.outputs.json).tags[0] }}-aarch64 --tag ${{ fromJSON(steps.meta.outputs.json).tags[0] }}
+          docker buildx imagetools create --append ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-aarch64 --tag ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
       - uses: sarisia/actions-status-discord@v1
         if: always()
         with:
.github/workflows/release-candidate.yml (vendored, new file): 110 lines

@@ -0,0 +1,110 @@
+name: Release Candidate to ghcr.io
+
+on:
+  release:
+    types: [prereleased]
+
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: "coollabsio/coolify"
+
+jobs:
+  amd64:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to ghcr.io
+        uses: docker/login-action@v2
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=branch
+            type=ref,event=pr
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          platforms: linux/amd64
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+  aarch64:
+    runs-on: [self-hosted, arm64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Login to ghcr.io
+        uses: docker/login-action@v2
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=branch
+            type=ref,event=pr
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          platforms: linux/aarch64
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}-aarch64
+          labels: ${{ steps.meta.outputs.labels }}
+  merge-manifest:
+    runs-on: ubuntu-latest
+    needs: [amd64, aarch64]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to ghcr.io
+        uses: docker/login-action@v2
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=branch
+            type=ref,event=pr
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+      - name: Create & publish manifest
+        run: |
+          docker buildx imagetools create --append ${{ steps.meta.outputs.tags }}-aarch64 --tag ${{ steps.meta.outputs.tags }}
+      - uses: sarisia/actions-status-discord@v1
+        if: always()
+        with:
+          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
@@ -1,19 +1,18 @@
-name: pocketbase-release
+name: Staging Release to DockerHub
 
 on:
   push:
-    paths:
-      - "others/pocketbase/*"
-      - ".github/workflows/pocketbase-release.yml"
     branches:
-      - next
-      - main
+      - "this-branch-does-not-exists"
 jobs:
   arm64:
     runs-on: [self-hosted, arm64]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
+        with:
+          ref: "next"
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v1
       - name: Set up Docker Buildx
@@ -23,18 +22,25 @@ jobs:
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
       - name: Build and push
         uses: docker/build-push-action@v2
         with:
-          context: others/pocketbase/
+          context: .
          platforms: linux/arm64
          push: true
-          tags: coollabsio/pocketbase:0.12.3-arm64
+          tags: coollabsio/coolify:next-arm64
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
   amd64:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
         uses: actions/checkout@v3
+        with:
+          ref: "next"
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
@@ -44,37 +50,21 @@ jobs:
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Get current package version
+        uses: martinbeentjes/npm-get-version-action@v1.2.3
+        id: package-version
       - name: Build and push
         uses: docker/build-push-action@v3
         with:
-          context: others/pocketbase/
+          context: .
          platforms: linux/amd64
          push: true
-          tags: coollabsio/pocketbase:0.12.3-amd64
-  aarch64:
-    runs-on: [self-hosted, arm64]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Build and push
-        uses: docker/build-push-action@v2
-        with:
-          context: others/pocketbase/
-          platforms: linux/aarch64
-          push: true
-          tags: coollabsio/pocketbase:0.12.3-aarch64
+          tags: coollabsio/coolify:next
+          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
+          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
   merge-manifest:
     runs-on: ubuntu-latest
-    needs: [amd64, arm64, aarch64]
+    needs: [arm64, amd64]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -89,5 +79,8 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Create & publish manifest
         run: |
-          docker manifest create coollabsio/pocketbase:0.12.3 --amend coollabsio/pocketbase:0.12.3-amd64 --amend coollabsio/pocketbase:0.12.3-arm64 --amend coollabsio/pocketbase:0.12.3-aarch64
-          docker manifest push coollabsio/pocketbase:0.12.3
+          docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
+      - uses: sarisia/actions-status-discord@v1
+        if: always()
+        with:
+          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
.github/workflows/staging-release.yml (vendored): 117 changed lines

@@ -1,76 +1,77 @@
-name: staging-release
+name: Staging Release to ghcr.io
+concurrency:
+  group: staging_environment
+  cancel-in-progress: true
 on:
   push:
-    paths:
-      - "**"
-      - "!others/fluentbit"
-      - "!others/pocketbase"
-      - "!.github/workflows/fluent-bit-release.yml"
-      - "!.github/workflows/pocketbase-release.yml"
-    branches:
-      - next
+    branches-ignore:
+      - "main"
+      - "v4"
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: "coollabsio/coolify"
 
 jobs:
-  arm64:
-    runs-on: [self-hosted, arm64]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          ref: "next"
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-      - name: Login to DockerHub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
-      - name: Build and push
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          platforms: linux/arm64
-          push: true
-          tags: coollabsio/coolify:next-arm64
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
   amd64:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout
         uses: actions/checkout@v3
         with:
          ref: "next"
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
+      - name: Login to ghcr.io
        uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata (tags, labels)
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
       - name: Build and push
         uses: docker/build-push-action@v3
         with:
          context: .
          platforms: linux/amd64
          push: true
-          tags: coollabsio/coolify:next
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+  aarch64:
+    runs-on:
+      group: aarch-runners
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          ref: "next"
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+      - name: Login to ghcr.io
+        uses: docker/login-action@v2
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata (tags, labels)
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+      - name: Build and push
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          platforms: linux/aarch64
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}-aarch64
+          labels: ${{ steps.meta.outputs.labels }}
   merge-manifest:
     runs-on: ubuntu-latest
-    needs: [arm64, amd64]
+    needs: [amd64, aarch64]
     steps:
       - name: Checkout
         uses: actions/checkout@v3
@@ -78,14 +79,20 @@ jobs:
        uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
+      - name: Login to ghcr.io
        uses: docker/login-action@v2
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Extract metadata (tags, labels)
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
       - name: Create & publish manifest
         run: |
-          docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
+          docker buildx imagetools create --append ${{ steps.meta.outputs.tags }}-aarch64 --tag ${{ steps.meta.outputs.tags }}
       - uses: sarisia/actions-status-discord@v1
         if: always()
         with:
@@ -38,7 +38,7 @@ RUN curl -SL https://cdn.coollabs.io/bin/$TARGETPLATFORM/pack-$PACK_VERSION -o /
 RUN chmod +x ~/.docker/cli-plugins/docker-compose /usr/bin/docker /usr/local/bin/pack
 
 COPY --from=build /app/apps/api/build/ .
-COPY --from=build /app/others/fluentbit/ ./fluentbit
+# COPY --from=build /app/others/fluentbit/ ./fluentbit
 COPY --from=build /app/apps/ui/build/ ./public
 COPY --from=build /app/apps/api/prisma/ ./prisma
 COPY --from=build /app/apps/api/package.json .
@@ -100,7 +100,7 @@ Deploy your resource to:
 
 - Mastodon: [@andrasbacsai@fosstodon.org](https://fosstodon.org/@andrasbacsai)
 - Telegram: [@andrasbacsai](https://t.me/andrasbacsai)
-- Twitter: [@andrasbacsai](https://twitter.com/andrasbacsai)
+- Twitter: [@andrasbacsai](https://twitter.com/heyandras)
 - Email: [andras@coollabs.io](mailto:andras@coollabs.io)
 - Discord: [Invitation](https://coollabs.io/discord)
 
@@ -230,7 +230,7 @@
     description: "Open Source realtime backend in 1 file"
     services:
       $$id:
-        image: coollabsio/pocketbase:$$core_version
+        image: ghcr.io/coollabsio/pocketbase:$$core_version
         volumes:
           - $$id-data:/app/pb_data
         ports:
@@ -414,6 +414,7 @@
     proxy:
       - port: "22"
         hostPort: $$config_hostport_ssh
+      - port: "3000"
     variables:
       - id: $$config_hostport_ssh
         name: SSH_PORT
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "ApplicationPersistentStorage" ADD COLUMN "hostPath" TEXT;
@@ -195,6 +195,7 @@ model ApplicationSettings {
 model ApplicationPersistentStorage {
   id            String   @id @default(cuid())
   applicationId String
+  hostPath      String?
   path          String
   oldPath       Boolean  @default(false)
   createdAt     DateTime @default(now())
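The new nullable hostPath column is what the deploy handlers further down read when they build volume mappings. A minimal usage sketch with the generated Prisma Client, assuming the model shown above; the concrete values and the surrounding setup are illustrative only, not part of the diff:

  import { PrismaClient } from '@prisma/client';

  const prisma = new PrismaClient();

  // Illustrative only: attach a host-path bind mount to an application.
  // Omitting hostPath keeps the previous named-volume behaviour, since the
  // column is nullable (String?).
  async function addHostPathStorage(applicationId: string) {
    return prisma.applicationPersistentStorage.create({
      data: {
        applicationId,
        hostPath: '/data/uploads', // hypothetical host directory
        path: '/app/uploads'       // mount point inside the container
      }
    });
  }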
@@ -306,7 +306,7 @@ async function initServer() {
   } catch (error) {}
   try {
     console.log('[003] Cleaning up old build sources under /tmp/build-sources/...');
-    await fs.rm('/tmp/build-sources', { recursive: true, force: true });
+    if (!isDev) await fs.rm('/tmp/build-sources', { recursive: true, force: true });
   } catch (error) {
     console.log(error);
   }
@@ -402,14 +402,14 @@ async function autoUpdater() {
   if (!isDev) {
     const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
     if (isAutoUpdateEnabled) {
-      await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
+      await executeCommand({ command: `docker pull ghcr.io/coollabsio/coolify:${latestVersion}` });
       await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
       await executeCommand({
         command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
       });
       await executeCommand({
         shell: true,
-        command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
+        command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db ghcr.io/coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
       });
     }
   } else {
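The auto-updater now pulls the release image from ghcr.io instead of Docker Hub. A reduced sketch of that step, reusing the executeCommand helper exactly as it is called in the hunk above; the import path and anything beyond the calls shown there are assumptions:

  // Sketch based on the calls visible above; not the complete autoUpdater().
  import { executeCommand } from '../lib/common'; // assumed import path

  async function pullLatestImage(latestVersion: string) {
    // Release images now live on GitHub Container Registry.
    await executeCommand({ command: `docker pull ghcr.io/coollabsio/coolify:${latestVersion}` });
    // Keep the COOLIFY_* environment for the recreated stack, as autoUpdater() does.
    await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` });
  }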
@@ -110,6 +110,9 @@ import * as buildpacks from '../lib/buildPacks';
         .replace(/\//gi, '-')
         .replace('-app', '')}:${storage.path}`;
       }
+      if (storage.hostPath) {
+        return `${storage.hostPath}:${storage.path}`
+      }
       return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
     }) || [];
 
@@ -160,7 +163,11 @@ import * as buildpacks from '../lib/buildPacks';
       port: exposePort ? `${exposePort}:${port}` : port
     });
     try {
-      const composeVolumes = volumes.map((volume) => {
+      const composeVolumes = volumes.filter(v => {
+        if (!v.startsWith('.') && !v.startsWith('..') && !v.startsWith('/') && !v.startsWith('~')) {
+          return v;
+        }
+      }).map((volume) => {
         return {
           [`${volume.split(':')[0]}`]: {
             name: volume.split(':')[0]
@@ -233,7 +240,7 @@ import * as buildpacks from '../lib/buildPacks';
         applicationId: application.id
       });
     }
-    await fs.rm(workdir, { recursive: true, force: true });
+    if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
     return;
   }
   try {
@@ -256,7 +263,7 @@ import * as buildpacks from '../lib/buildPacks';
       await saveBuildLog({ line: error.stderr, buildId, applicationId });
     }
   } finally {
-    await fs.rm(workdir, { recursive: true, force: true });
+    if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
     await prisma.build.update({
       where: { id: buildId },
       data: { status: 'success' }
@@ -381,6 +388,9 @@ import * as buildpacks from '../lib/buildPacks';
         .replace(/\//gi, '-')
         .replace('-app', '')}:${storage.path}`;
       }
+      if (storage.hostPath) {
+        return `${storage.hostPath}:${storage.path}`
+      }
       return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
     }) || [];
 
@@ -406,7 +416,7 @@ import * as buildpacks from '../lib/buildPacks';
     installCommand = configuration.installCommand;
     startCommand = configuration.startCommand;
     buildCommand = configuration.buildCommand;
-    publishDirectory = configuration.publishDirectory;
+    publishDirectory = configuration.publishDirectory || '';
     baseDirectory = configuration.baseDirectory || '';
     dockerFileLocation = configuration.dockerFileLocation;
     dockerComposeFileLocation = configuration.dockerComposeFileLocation;
@@ -691,7 +701,11 @@ import * as buildpacks from '../lib/buildPacks';
      await saveDockerRegistryCredentials({ url, username, password, workdir });
     }
     try {
-      const composeVolumes = volumes.map((volume) => {
+      const composeVolumes = volumes.filter(v => {
+        if (!v.startsWith('.') && !v.startsWith('..') && !v.startsWith('/') && !v.startsWith('~')) {
+          return v;
+        }
+      }).map((volume) => {
         return {
           [`${volume.split(':')[0]}`]: {
             name: volume.split(':')[0]
@@ -768,7 +782,7 @@ import * as buildpacks from '../lib/buildPacks';
         applicationId: application.id
       });
     }
-    await fs.rm(workdir, { recursive: true, force: true });
+    if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
     return;
   }
   try {
@@ -789,7 +803,7 @@ import * as buildpacks from '../lib/buildPacks';
      await saveBuildLog({ line: error.stderr, buildId, applicationId });
     }
   } finally {
-    await fs.rm(workdir, { recursive: true, force: true });
+    if (!isDev) await fs.rm(workdir, { recursive: true, force: true });
     await prisma.build.update({ where: { id: buildId }, data: { status: 'success' } });
   }
 });
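Both deploy paths above map persistent storages to `source:target` volume strings, and the new branch makes a stored hostPath win over the generated named volume. A self-contained sketch of that mapping; the storage shape follows the ApplicationPersistentStorage model, everything else is an assumption for illustration:

  interface PersistentStorage {
    path: string;             // mount point inside the container
    hostPath?: string | null; // optional bind-mount source on the host
  }

  // Mirrors the hunk above: prefer an explicit host path, otherwise derive a
  // named volume from the application id and the container path.
  function toVolumeString(applicationId: string, storage: PersistentStorage): string {
    if (storage.hostPath) {
      return `${storage.hostPath}:${storage.path}`;
    }
    return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`;
  }

  // Example: toVolumeString('abc123', { path: '/app/data', hostPath: '/srv/data' })
  // yields '/srv/data:/app/data'.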
@@ -804,6 +804,7 @@ export async function buildCacheImageWithNode(data, imageForBuild) {
     Dockerfile.push(`RUN ${installCommand}`);
   }
   Dockerfile.push(`RUN ${buildCommand}`);
+  Dockerfile.push('RUN rm -fr .git');
   await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
   await buildImage({ ...data, isCache: true });
 }
@@ -821,6 +822,7 @@ export async function buildCacheImageForLaravel(data, imageForBuild) {
   }
   Dockerfile.push(`COPY *.json *.mix.js /app/`);
   Dockerfile.push(`COPY resources /app/resources`);
+  Dockerfile.push('RUN rm -fr .git');
   Dockerfile.push(`RUN yarn install && yarn production`);
   await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
   await buildImage({ ...data, isCache: true });
@@ -842,6 +844,7 @@ export async function buildCacheImageWithCargo(data, imageForBuild) {
   Dockerfile.push('RUN cargo install cargo-chef');
   Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
   Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
+  Dockerfile.push('RUN rm -fr .git');
   await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
   await buildImage({ ...data, isCache: true });
 }
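Each buildCacheImage* helper assembles the Dockerfile as an array of lines and writes it in one go; the change appends `RUN rm -fr .git` so repository metadata is not baked into the cache image. A stripped-down sketch of that pattern, with illustrative names only:

  import { promises as fs } from 'fs';

  // Illustrative reduction of the buildCacheImage* helpers above.
  async function writeCacheDockerfile(workdir: string, buildCommand: string): Promise<void> {
    const Dockerfile: string[] = [];
    Dockerfile.push(`RUN ${buildCommand}`);
    Dockerfile.push('RUN rm -fr .git'); // drop .git before the image is committed
    await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  }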
@@ -36,17 +36,23 @@ export default async function (data) {
   if (volumes.length > 0) {
     for (const volume of volumes) {
       let [v, path] = volume.split(':');
-      composeVolumes[v] = {
-        name: v
-      };
+      if (!v.startsWith('.') && !v.startsWith('..') && !v.startsWith('/') && !v.startsWith('~')) {
+        composeVolumes[v] = {
+          name: v
+        };
+      }
     }
   }
 
   let networks = {};
   for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
     value['container_name'] = `${applicationId}-${key}`;
 
+    if (value['env_file']) {
+      delete value['env_file'];
+    }
+
     let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
+    console.log({ key, environment });
     if (Object.keys(environment).length > 0) {
       environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`);
     }
@@ -59,7 +65,7 @@ export default async function (data) {
     const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
     let finalArgs = [...buildEnvs];
     if (Object.keys(buildArgs).length > 0) {
-      for (const arg of buildArgs) {
+      for (const arg of Object.keys(buildArgs)) {
         const [key, _] = arg.split('=');
         if (finalArgs.filter((env) => env.startsWith(key)).length === 0) {
           finalArgs.push(arg);
@@ -77,17 +83,57 @@ export default async function (data) {
     // TODO: If we support separated volume for each service, we need to add it here
     if (value['volumes']?.length > 0) {
       value['volumes'] = value['volumes'].map((volume) => {
-        let [v, path, permission] = volume.split(':');
-        if (!path) {
-          path = v;
-          v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
-        } else {
-          v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
-        }
-        composeVolumes[v] = {
-          name: v
-        };
-        return `${v}:${path}${permission ? ':' + permission : ''}`;
+        if (typeof volume === 'string') {
+          let [v, path, permission] = volume.split(':');
+          if (
+            v.startsWith('.') ||
+            v.startsWith('..') ||
+            v.startsWith('/') ||
+            v.startsWith('~') ||
+            v.startsWith('$PWD')
+          ) {
+            v = v
+              .replace(/^\./, `~`)
+              .replace(/^\.\./, '~')
+              .replace(/^\$PWD/, '~');
+          } else {
+            if (!path) {
+              path = v;
+              v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
+            } else {
+              v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
+            }
+            composeVolumes[v] = {
+              name: v
+            };
+          }
+          return `${v}:${path}${permission ? ':' + permission : ''}`;
+        }
+        if (typeof volume === 'object') {
+          let { source, target, mode } = volume;
+          if (
+            source.startsWith('.') ||
+            source.startsWith('..') ||
+            source.startsWith('/') ||
+            source.startsWith('~') ||
+            source.startsWith('$PWD')
+          ) {
+            source = source
+              .replace(/^\./, `~`)
+              .replace(/^\.\./, '~')
+              .replace(/^\$PWD/, '~');
+            console.log({ source });
+          } else {
+            if (!target) {
+              target = source;
+              source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
+            } else {
+              source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
+            }
+          }
+
+          return `${source}:${target}${mode ? ':' + mode : ''}`;
+        }
       });
     }
     if (volumes.length > 0) {
@@ -98,16 +144,20 @@ export default async function (data) {
     if (dockerComposeConfiguration[key]?.port) {
       value['expose'] = [dockerComposeConfiguration[key].port];
     }
-    if (value['networks']?.length > 0) {
-      value['networks'].forEach((network) => {
-        networks[network] = {
-          name: network
-        };
-      });
-      value['networks'] = [...(value['networks'] || ''), network];
-    } else {
-      value['networks'] = [network];
+    value['networks'] = [network];
+    if (value['build']?.network) {
+      delete value['build']['network'];
     }
+    // if (value['networks']?.length > 0) {
+    //   value['networks'].forEach((network) => {
+    //     networks[network] = {
+    //       name: network
+    //     };
+    //   });
+    //   value['networks'] = [...(value['networks'] || ''), network];
+    // } else {
+    //   value['networks'] = [network];
+    // }
+
     dockerComposeYaml.services[key] = {
       ...dockerComposeYaml.services[key],
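The compose buildpack now accepts both the short string form and the long object form of a volume entry: host-like sources (., .., /, ~, $PWD) are rewritten to start with ~, while anything else becomes an application-scoped named volume that is also registered in composeVolumes. A condensed sketch of that normalization, mirroring the branch structure and replacement order used in the hunk above; the types and helper names are assumptions:

  type LongVolume = { source: string; target?: string; mode?: string };

  const isHostLike = (s: string) =>
    s.startsWith('.') || s.startsWith('..') || s.startsWith('/') || s.startsWith('~') || s.startsWith('$PWD');

  const toHome = (s: string) => s.replace(/^\./, '~').replace(/^\.\./, '~').replace(/^\$PWD/, '~');

  // Condensed sketch of the map() callback added above.
  function normalizeVolume(
    applicationId: string,
    volume: string | LongVolume,
    composeVolumes: Record<string, { name: string }>
  ): string {
    if (typeof volume === 'string') {
      let [v, path, permission] = volume.split(':');
      if (isHostLike(v)) {
        v = toHome(v); // bind mount: keep the host path, anchored at ~
      } else {
        if (!path) path = v;
        v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
        composeVolumes[v] = { name: v }; // named volume still needs a top-level volumes: entry
      }
      return `${v}:${path}${permission ? ':' + permission : ''}`;
    }
    let { source, target, mode } = volume;
    if (isHostLike(source)) {
      source = toHome(source);
    } else {
      if (!target) target = source;
      source = `${applicationId}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
    }
    return `${source}:${target}${mode ? ':' + mode : ''}`;
  }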
@@ -36,6 +36,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
Dockerfile.push(`RUN deno cache ${denoMainFile}`);
|
||||||
Dockerfile.push(`ENV NO_COLOR true`);
|
Dockerfile.push(`ENV NO_COLOR true`);
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
|
Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ export default async function (data) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
Dockerfile.push(`RUN rm -fr .git`);
|
||||||
await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
|
await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
|
||||||
await buildImage(data);
|
await buildImage(data);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ const createDockerfile = async (data, imageforBuild): Promise<void> => {
|
|||||||
if (baseImage?.includes('nginx')) {
|
if (baseImage?.includes('nginx')) {
|
||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
`COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
|
||||||
);
|
);
|
||||||
Dockerfile.push(`COPY --chown=application:application . ./`);
|
Dockerfile.push(`COPY --chown=application:application . ./`);
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
Dockerfile.push(`CMD ${startCommand}`);
|
Dockerfile.push(`CMD ${startCommand}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||||
|
|||||||
@@ -36,6 +36,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
|
||||||
Dockerfile.push(`RUN ${installCommand}`);
|
Dockerfile.push(`RUN ${installCommand}`);
|
||||||
Dockerfile.push(`RUN ${buildCommand}`);
|
Dockerfile.push(`RUN ${buildCommand}`);
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
Dockerfile.push(`CMD ${startCommand}`);
|
Dockerfile.push(`CMD ${startCommand}`);
|
||||||
} else if (deploymentType === 'static') {
|
} else if (deploymentType === 'static') {
|
||||||
@@ -43,6 +44,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`EXPOSE 80`);
|
Dockerfile.push(`EXPOSE 80`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -34,6 +34,7 @@ const createDockerfile = async (data, image): Promise<void> => {
|
|||||||
Dockerfile.push(`RUN ${buildCommand}`);
|
Dockerfile.push(`RUN ${buildCommand}`);
|
||||||
}
|
}
|
||||||
Dockerfile.push(`EXPOSE ${port}`);
|
Dockerfile.push(`EXPOSE ${port}`);
|
||||||
|
Dockerfile.push('RUN rm -fr .git');
|
||||||
Dockerfile.push(`CMD ${startCommand}`);
|
Dockerfile.push(`CMD ${startCommand}`);
|
||||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -36,6 +36,7 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
Dockerfile.push(`RUN ${installCommand}`);
Dockerfile.push(`RUN ${buildCommand}`);
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ${startCommand}`);
} else if (deploymentType === 'static') {
@@ -43,6 +44,7 @@ const createDockerfile = async (data, image): Promise<void> => {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app${publishDirectory} ./`);
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE 80`);
}

@@ -28,6 +28,7 @@ const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
}

Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
@@ -52,7 +52,7 @@ const createDockerfile = async (data, image): Promise<void> => {
} else {
Dockerfile.push(`CMD python ${pythonModule}`);
}
+Dockerfile.push('RUN rm -fr .git');
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

@@ -12,6 +12,7 @@ const createDockerfile = async (data, image): Promise<void> => {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
@@ -20,6 +20,7 @@ const createDockerfile = async (data, image, name): Promise<void> => {
);
Dockerfile.push(`RUN update-ca-certificates`);
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
Dockerfile.push(`CMD ["/app/${name}"]`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
@@ -38,6 +38,7 @@ const createDockerfile = async (data, image): Promise<void> => {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
@@ -12,6 +12,7 @@ const createDockerfile = async (data, image): Promise<void> => {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
@@ -12,6 +12,7 @@ const createDockerfile = async (data, image): Promise<void> => {
if (baseImage?.includes('nginx')) {
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
}
+Dockerfile.push('RUN rm -fr .git');
Dockerfile.push(`EXPOSE ${port}`);
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};
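Each of the hunks above applies the same change to a different buildpack: a `RUN rm -fr .git` instruction is pushed into the generated Dockerfile just before `EXPOSE`, so the cloned .git directory never ends up in the final image. A minimal sketch of the shared generator pattern (the field names on `data` are assumptions, not the exact buildpack code):

// Minimal sketch, not the actual buildpack source.
import { promises as fs } from 'fs';

async function writeDockerfile(workdir: string, image: string, data: { port: number; startCommand: string }) {
	const Dockerfile: string[] = [];
	Dockerfile.push(`FROM ${image}`);
	Dockerfile.push('WORKDIR /app');
	Dockerfile.push('COPY . ./');
	// New in this release: drop the git metadata before the image is finalized.
	Dockerfile.push('RUN rm -fr .git');
	Dockerfile.push(`EXPOSE ${data.port}`);
	Dockerfile.push(`CMD ${data.startCommand}`);
	await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
}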
@@ -11,15 +11,15 @@ import { promises as dns } from 'dns';
import * as Sentry from '@sentry/node';
import { PrismaClient } from '@prisma/client';
import os from 'os';
-import sshConfig from 'ssh-config';
+import * as SSHConfig from 'ssh-config/src/ssh-config';
import jsonwebtoken from 'jsonwebtoken';
import { checkContainer, removeContainer } from './docker';
import { day } from './dayjs';
-import { saveBuildLog, saveDockerRegistryCredentials } from './buildPacks/common';
+import { saveBuildLog } from './buildPacks/common';
import { scheduler } from './scheduler';
import type { ExecaChildProcess } from 'execa';

-export const version = '3.12.24';
+export const version = '3.12.30';
export const isDev = process.env.NODE_ENV === 'development';
export const proxyPort = process.env.COOLIFY_PROXY_PORT;
export const proxySecurePort = process.env.COOLIFY_PROXY_SECURE_PORT;
@@ -402,8 +402,8 @@ export const supportedDatabaseTypesAndVersions = [
fancyName: 'MongoDB',
baseImage: 'bitnami/mongodb',
baseImageARM: 'mongo',
-versions: ['5.0', '4.4', '4.2'],
+versions: ['6.0', '5.0', '4.4', '4.2'],
-versionsARM: ['5.0', '4.4', '4.2']
+versionsARM: ['6.0', '5.0', '4.4', '4.2']
},
{
name: 'mysql',
@@ -418,16 +418,16 @@ export const supportedDatabaseTypesAndVersions = [
fancyName: 'MariaDB',
baseImage: 'bitnami/mariadb',
baseImageARM: 'mariadb',
-versions: ['10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2'],
+versions: ['10.11', '10.10', '10.9', '10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2'],
-versionsARM: ['10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2']
+versionsARM: ['10.11', '10.10', '10.9', '10.8', '10.7', '10.6', '10.5', '10.4', '10.3', '10.2']
},
{
name: 'postgresql',
fancyName: 'PostgreSQL',
baseImage: 'bitnami/postgresql',
baseImageARM: 'postgres',
-versions: ['14.5.0', '13.8.0', '12.12.0', '11.17.0', '10.22.0'],
+versions: ['15.2.0', '14.7.0', '14.5.0', '13.8.0', '12.12.0', '11.17.0', '10.22.0'],
-versionsARM: ['14.5', '13.8', '12.12', '11.17', '10.22']
+versionsARM: ['15.2', '14.7', '14.5', '13.8', '12.12', '11.17', '10.22']
},
{
name: 'redis',
@@ -442,14 +442,14 @@ export const supportedDatabaseTypesAndVersions = [
fancyName: 'CouchDB',
baseImage: 'bitnami/couchdb',
baseImageARM: 'couchdb',
-versions: ['3.2.2', '3.1.2', '2.3.1'],
+versions: ['3.3.1', '3.2.2', '3.1.2', '2.3.1'],
-versionsARM: ['3.2.2', '3.1.2', '2.3.1']
+versionsARM: ['3.3', '3.2.2', '3.1.2', '2.3.1']
},
{
name: 'edgedb',
fancyName: 'EdgeDB',
baseImage: 'edgedb/edgedb',
-versions: ['latest', '2.1', '2.0', '1.4']
+versions: ['latest', '2.9', '2.8', '2.7']
}
];

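The version bumps above extend both the x86 and ARM lists for each database. A minimal sketch of how such an entry could be resolved to a concrete image tag (this lookup helper is an illustration, not the actual Coolify code):

// Illustrative only: resolves a database type + version to an image reference.
type DbEntry = {
	name: string;
	baseImage: string;
	baseImageARM?: string;
	versions: string[];
	versionsARM?: string[];
};

function resolveImage(entry: DbEntry, version: string, isArm: boolean): string {
	const pool = isArm ? entry.versionsARM ?? [] : entry.versions;
	if (!pool.includes(version)) {
		throw new Error(`Unsupported ${entry.name} version: ${version}`);
	}
	const base = isArm ? entry.baseImageARM ?? entry.baseImage : entry.baseImage;
	return `${base}:${version}`;
}

// e.g. resolveImage(mongoEntry, '6.0', false) would yield 'bitnami/mongodb:6.0'.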
@@ -498,33 +498,56 @@ export async function getFreeSSHLocalPort(id: string): Promise<number | boolean>
return false;
}

+/**
+ * Update the ssh config file with a host
+ *
+ * @param id Destination ID
+ * @returns
+ */
export async function createRemoteEngineConfiguration(id: string) {
-const homedir = os.homedir();
const sshKeyFile = `/tmp/id_rsa-${id}`;
const localPort = await getFreeSSHLocalPort(id);
const {
sshKey: { privateKey },
-network,
remoteIpAddress,
remotePort,
remoteUser
} = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });

+// Write new keyfile
await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 });
-const config = sshConfig.parse('');
const Host = `${remoteIpAddress}-remote`;

+// Removes previous ssh-keys
try {
await executeCommand({ command: `ssh-keygen -R ${Host}` });
await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
-} catch (error) { }
+} catch (error) {
+//
+}

+const homedir = os.homedir();
+let currentConfigFileContent = '';
+try {
+// Read the current config file
+currentConfigFileContent = (await fs.readFile(`${homedir}/.ssh/config`)).toString();
+} catch (error) {
+// File doesn't exist, so we do nothing, a new one is going to be created
+}

+// Parse the config file
+const config = SSHConfig.parse(currentConfigFileContent);

+// Remove current config for the given host
const found = config.find({ Host });
const foundIp = config.find({ Host: remoteIpAddress });

if (found) config.remove({ Host });
if (foundIp) config.remove({ Host: remoteIpAddress });

+// Create the new config
config.append({
Host,
Hostname: remoteIpAddress,
@@ -537,13 +560,17 @@ export async function createRemoteEngineConfiguration(id: string) {
ControlPersist: '10m'
});

+// Check if .ssh folder exists, and if not create one
try {
await fs.stat(`${homedir}/.ssh/`);
} catch (error) {
await fs.mkdir(`${homedir}/.ssh/`);
}
-return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));
+// Write the config
+return await fs.writeFile(`${homedir}/.ssh/config`, SSHConfig.stringify(config));
}

export async function executeCommand({
command,
dockerId = null,
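The behavioural change here is that the existing ~/.ssh/config is now read and parsed instead of being rebuilt from an empty string, so unrelated host entries survive a reconfiguration. A minimal standalone sketch of that parse/remove/append/stringify round trip with the ssh-config package (the host values and identity file path are placeholders, not Coolify's):

// Sketch of the read-modify-write cycle; not the Coolify function itself.
import { promises as fs } from 'fs';
import os from 'os';
import SSHConfig from 'ssh-config';

async function upsertHost(host: string, hostname: string, identityFile: string) {
	const configPath = `${os.homedir()}/.ssh/config`;
	let raw = '';
	try {
		raw = (await fs.readFile(configPath)).toString();
	} catch {
		// No config file yet; start from an empty one.
	}
	const config = SSHConfig.parse(raw);
	// Drop any stale section for this host before appending a fresh one.
	if (config.find({ Host: host })) config.remove({ Host: host });
	config.append({
		Host: host,
		Hostname: hostname,
		IdentityFile: identityFile,
		StrictHostKeyChecking: 'no'
	});
	await fs.writeFile(configPath, SSHConfig.stringify(config));
}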
@@ -822,97 +849,97 @@ export function generatePassword({

type DatabaseConfiguration =
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
MYSQL_DATABASE: string;
MYSQL_PASSWORD: string;
MYSQL_ROOT_USER: string;
MYSQL_USER: string;
MYSQL_ROOT_PASSWORD: string;
};
}
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
MONGO_INITDB_ROOT_USERNAME?: string;
MONGO_INITDB_ROOT_PASSWORD?: string;
MONGODB_ROOT_USER?: string;
MONGODB_ROOT_PASSWORD?: string;
};
}
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
MARIADB_ROOT_USER: string;
MARIADB_ROOT_PASSWORD: string;
MARIADB_USER: string;
MARIADB_PASSWORD: string;
MARIADB_DATABASE: string;
};
}
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
POSTGRES_PASSWORD?: string;
POSTGRES_USER?: string;
POSTGRES_DB?: string;
POSTGRESQL_POSTGRES_PASSWORD?: string;
POSTGRESQL_USERNAME?: string;
POSTGRESQL_PASSWORD?: string;
POSTGRESQL_DATABASE?: string;
};
}
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
REDIS_AOF_ENABLED: string;
REDIS_PASSWORD: string;
};
}
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
COUCHDB_PASSWORD: string;
COUCHDB_USER: string;
};
}
| {
volume: string;
image: string;
command?: string;
ulimits: Record<string, unknown>;
privatePort: number;
environmentVariables: {
EDGEDB_SERVER_PASSWORD: string;
EDGEDB_SERVER_USER: string;
EDGEDB_SERVER_DATABASE: string;
EDGEDB_SERVER_TLS_CERT_MODE: string;
};
};
export function generateDatabaseConfiguration(database: any): DatabaseConfiguration {
const { id, dbUser, dbUserPassword, rootUser, rootUserPassword, defaultDatabase, version, type } =
database;
@@ -986,7 +1013,7 @@ export function generateDatabaseConfiguration(database: any): DatabaseConfigurat
ulimits: {}
};
if (isARM()) {
-configuration.volume = `${id}-${type}-data:/var/lib/postgresql`;
+configuration.volume = `${id}-${type}-data:/var/lib/postgresql/data`;
configuration.environmentVariables = {
POSTGRES_PASSWORD: dbUserPassword,
POSTGRES_USER: dbUser,
@@ -1011,8 +1038,9 @@ export function generateDatabaseConfiguration(database: any): DatabaseConfigurat
};
if (isARM()) {
configuration.volume = `${id}-${type}-data:/data`;
-configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? 'yes' : 'no'
-} --requirepass ${dbUserPassword}`;
+configuration.command = `/usr/local/bin/redis-server --appendonly ${
+appendOnly ? 'yes' : 'no'
+} --requirepass ${dbUserPassword}`;
}
return configuration;
} else if (type === 'couchdb') {
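On ARM the official postgres image keeps its data under /var/lib/postgresql/data, which is why the named volume now targets that path. A sketch of the resulting ARM PostgreSQL configuration shape, with placeholder values (the exact image tag and passwords here are illustrative):

// Illustrative shape of the ARM PostgreSQL configuration produced above.
const id = 'db-example';
const type = 'postgresql';
const dbUser = 'coolify';
const dbUserPassword = 'change-me';

const armPostgresConfig = {
	image: 'postgres:15.2',
	// Mount the named volume at the data directory the official image actually uses.
	volume: `${id}-${type}-data:/var/lib/postgresql/data`,
	privatePort: 5432,
	ulimits: {},
	environmentVariables: {
		POSTGRES_PASSWORD: dbUserPassword,
		POSTGRES_USER: dbUser
	}
};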
@@ -1097,12 +1125,12 @@ export type ComposeFileService = {
command?: string;
ports?: string[];
build?:
| {
context: string;
dockerfile: string;
args?: Record<string, unknown>;
}
| string;
deploy?: {
restart_policy?: {
condition?: string;
@@ -1173,7 +1201,7 @@ export const createDirectories = async ({
let workdirFound = false;
try {
workdirFound = !!(await fs.stat(workdir));
-} catch (error) { }
+} catch (error) {}
if (workdirFound) {
await executeCommand({ command: `rm -fr ${workdir}` });
}
@@ -1633,6 +1661,9 @@ export function errorHandler({
type?: string | null;
}) {
if (message.message) message = message.message;
+if (message.includes('Unique constraint failed')) {
+message = 'This data is unique and already exists. Please try again with a different value.';
+}
if (type === 'normal') {
Sentry.captureException(message);
}
@@ -1697,7 +1728,7 @@ export async function stopBuild(buildId, applicationId) {
}
}
count++;
-} catch (error) { }
+} catch (error) {}
}, 100);
});
}
@@ -1720,7 +1751,7 @@ export async function cleanupDockerStorage(dockerId) {
// Cleanup images that are not used by any container
try {
await executeCommand({ dockerId, command: `docker image prune -af` });
-} catch (error) { }
+} catch (error) {}

// Prune coolify managed containers
try {
@@ -1728,12 +1759,12 @@ export async function cleanupDockerStorage(dockerId) {
dockerId,
command: `docker container prune -f --filter "label=coolify.managed=true"`
});
-} catch (error) { }
+} catch (error) {}

// Cleanup build caches
try {
await executeCommand({ dockerId, command: `docker builder prune -af` });
-} catch (error) { }
+} catch (error) {}
}

export function persistentVolumes(id, persistentStorage, config) {
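The errorHandler change maps Prisma's "Unique constraint failed" errors to a friendlier message before anything is reported back. A minimal sketch of that normalization in isolation, assuming the error may arrive either as a string or as an Error-like object:

// Sketch of the message normalization added to errorHandler; illustrative only.
function normalizeErrorMessage(err: unknown): string {
	// Accept either a plain string or an object carrying a .message field.
	let message = typeof err === 'string' ? err : (err as Error)?.message ?? 'Unknown error';
	if (message.includes('Unique constraint failed')) {
		message = 'This data is unique and already exists. Please try again with a different value.';
	}
	return message;
}

// normalizeErrorMessage(new Error('Unique constraint failed on the fields: (`name`)'))
// -> 'This data is unique and already exists. Please try again with a different value.'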
@@ -1340,16 +1340,16 @@ export async function getStorages(request: FastifyRequest<OnlyId>) {
export async function saveStorage(request: FastifyRequest<SaveStorage>, reply: FastifyReply) {
try {
const { id } = request.params;
-const { path, newStorage, storageId } = request.body;
+const { hostPath, path, newStorage, storageId } = request.body;

if (newStorage) {
await prisma.applicationPersistentStorage.create({
-data: { path, application: { connect: { id } } }
+data: { hostPath, path, application: { connect: { id } } }
});
} else {
await prisma.applicationPersistentStorage.update({
where: { id: storageId },
-data: { path }
+data: { hostPath, path }
});
}
return reply.code(201).send();
@@ -96,6 +96,7 @@ export interface DeleteSecret extends OnlyId {
}
export interface SaveStorage extends OnlyId {
Body: {
+hostPath?: string;
path: string;
newStorage: boolean;
storageId: string;
@@ -302,7 +302,7 @@ export async function startDatabase(request: FastifyRequest<OnlyId>) {
databaseSecret
} = database;
const { privatePort, command, environmentVariables, image, volume, ulimits } =
-generateDatabaseConfiguration(database, arch);
+generateDatabaseConfiguration(database);

const network = destinationDockerId && destinationDocker.network;
const volumeName = volume.split(':')[0];
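Persistent storage entries can now carry an optional hostPath, which turns the mount into a bind mount from the host instead of a named volume. A sketch of a request body for the save-storage route defined by SaveStorage above (the route path and the empty storageId placeholder are assumptions):

// Illustrative only: a body for the save-storage endpoint.
const newBindMount = {
	hostPath: '/data/uploads', // optional host directory to bind-mount
	path: '/app/uploads', // mount point inside the container
	newStorage: true,
	storageId: '' // unused when newStorage is true
};

// await fetch(`/api/v1/applications/${applicationId}/storages`, {
//   method: 'POST',
//   headers: { 'Content-Type': 'application/json' },
//   body: JSON.stringify(newBindMount)
// });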
@@ -1,279 +1,353 @@
import type { FastifyRequest } from 'fastify';
import { FastifyReply } from 'fastify';
-import sshConfig from 'ssh-config'
+import {
-import fs from 'fs/promises'
+errorHandler,
-import os from 'os';
+executeCommand,
+listSettings,
-import { createRemoteEngineConfiguration, decrypt, errorHandler, executeCommand, listSettings, prisma, startTraefikProxy, stopTraefikProxy } from '../../../../lib/common';
+prisma,
+startTraefikProxy,
+stopTraefikProxy
+} from '../../../../lib/common';
import { checkContainer } from '../../../../lib/docker';

import type { OnlyId } from '../../../../types';
-import type { CheckDestination, ListDestinations, NewDestination, Proxy, SaveDestinationSettings } from './types';
+import type {
+CheckDestination,
+ListDestinations,
+NewDestination,
+Proxy,
+SaveDestinationSettings
+} from './types';

export async function listDestinations(request: FastifyRequest<ListDestinations>) {
try {
const teamId = request.user.teamId;
-const { onlyVerified = false } = request.query
+const { onlyVerified = false } = request.query;
-let destinations = []
+let destinations = [];
if (teamId === '0') {
destinations = await prisma.destinationDocker.findMany({ include: { teams: true } });
} else {
destinations = await prisma.destinationDocker.findMany({
where: { teams: { some: { id: teamId } } },
include: { teams: true }
});
}
if (onlyVerified) {
-destinations = destinations.filter(destination => destination.engine || (destination.remoteEngine && destination.remoteVerified))
+destinations = destinations.filter(
-}
+(destination) =>
-return {
+destination.engine || (destination.remoteEngine && destination.remoteVerified)
-destinations
+);
}
-} catch ({ status, message }) {
+return {
-return errorHandler({ status, message })
+destinations
-}
+};
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
export async function checkDestination(request: FastifyRequest<CheckDestination>) {
try {
const { network } = request.body;
const found = await prisma.destinationDocker.findFirst({ where: { network } });
if (found) {
throw {
message: `Network already exists: ${network}`
};
}
-return {}
+return {};
} catch ({ status, message }) {
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
export async function getDestination(request: FastifyRequest<OnlyId>) {
try {
-const { id } = request.params
+const { id } = request.params;
const teamId = request.user?.teamId;
const destination = await prisma.destinationDocker.findFirst({
where: { id, teams: { some: { id: teamId === '0' ? undefined : teamId } } },
include: { sshKey: true, application: true, service: true, database: true }
});
if (!destination && id !== 'new') {
throw { status: 404, message: `Destination not found.` };
}
const settings = await listSettings();
const payload = {
destination,
settings
};
return {
...payload
};
+} catch ({ status, message }) {
-} catch ({ status, message }) {
+return errorHandler({ status, message });
-return errorHandler({ status, message })
+}
-}
}
export async function newDestination(request: FastifyRequest<NewDestination>, reply: FastifyReply) {
try {
const teamId = request.user.teamId;
-const { id } = request.params
+const { id } = request.params;

-let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } = request.body
+let { name, network, engine, isCoolifyProxyUsed, remoteIpAddress, remoteUser, remotePort } =
-if (id === 'new') {
+request.body;
-if (engine) {
+if (id === 'new') {
-const { stdout } = await await executeCommand({ command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'` });
+if (engine) {
-if (stdout === '') {
+const { stdout } = await await executeCommand({
-await await executeCommand({ command: `docker network create --attachable ${network}` });
+command: `docker network ls --filter 'name=^${network}$' --format '{{json .}}'`
-}
+});
-await prisma.destinationDocker.create({
+if (stdout === '') {
-data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
+await await executeCommand({ command: `docker network create --attachable ${network}` });
-});
+}
-const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
+await prisma.destinationDocker.create({
-const destination = destinations.find((destination) => destination.network === network);
+data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed }
-if (destinations.length > 0) {
+});
-const proxyConfigured = destinations.find(
+const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
-(destination) => destination.network !== network && destination.isCoolifyProxyUsed === true
+const destination = destinations.find((destination) => destination.network === network);
-);
+if (destinations.length > 0) {
-if (proxyConfigured) {
+const proxyConfigured = destinations.find(
-isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
+(destination) =>
-}
+destination.network !== network && destination.isCoolifyProxyUsed === true
-await prisma.destinationDocker.updateMany({ where: { engine }, data: { isCoolifyProxyUsed } });
+);
-}
+if (proxyConfigured) {
-if (isCoolifyProxyUsed) {
+isCoolifyProxyUsed = !!proxyConfigured.isCoolifyProxyUsed;
-await startTraefikProxy(destination.id);
+}
-}
+await prisma.destinationDocker.updateMany({
-return reply.code(201).send({ id: destination.id });
+where: { engine },
-} else {
+data: { isCoolifyProxyUsed }
-const destination = await prisma.destinationDocker.create({
+});
-data: { name, teams: { connect: { id: teamId } }, engine, network, isCoolifyProxyUsed, remoteEngine: true, remoteIpAddress, remoteUser, remotePort: Number(remotePort) }
+}
-});
+if (isCoolifyProxyUsed) {
-return reply.code(201).send({ id: destination.id })
+await startTraefikProxy(destination.id);
}
-} else {
+return reply.code(201).send({ id: destination.id });
-await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
+} else {
-return reply.code(201).send();
+const destination = await prisma.destinationDocker.create({
-}
+data: {
+name,
-} catch ({ status, message }) {
+teams: { connect: { id: teamId } },
-return errorHandler({ status, message })
+engine,
-}
+network,
+isCoolifyProxyUsed,
+remoteEngine: true,
+remoteIpAddress,
+remoteUser,
+remotePort: Number(remotePort)
+}
+});
+return reply.code(201).send({ id: destination.id });
+}
+} else {
+await prisma.destinationDocker.update({ where: { id }, data: { name, engine, network } });
+return reply.code(201).send();
+}
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
export async function deleteDestination(request: FastifyRequest<OnlyId>) {
try {
-const { id } = request.params
+const { id } = request.params;
-const { network, remoteVerified, engine, isCoolifyProxyUsed } = await prisma.destinationDocker.findUnique({ where: { id } });
+const appFound = await prisma.application.findFirst({ where: { destinationDockerId: id } });
-if (isCoolifyProxyUsed) {
+const serviceFound = await prisma.service.findFirst({ where: { destinationDockerId: id } });
-if (engine || remoteVerified) {
+const databaseFound = await prisma.database.findFirst({ where: { destinationDockerId: id } });
-const { stdout: found } = await executeCommand({
+if (appFound || serviceFound || databaseFound) {
-dockerId: id,
+throw {
-command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
+message: `Destination is in use.<br>Remove all applications, services and databases using this destination first.`
-})
+};
-if (found) {
+}
-await executeCommand({ dockerId: id, command: `docker network disconnect ${network} coolify-proxy` })
+const { network, remoteVerified, engine, isCoolifyProxyUsed } =
-await executeCommand({ dockerId: id, command: `docker network rm ${network}` })
+await prisma.destinationDocker.findUnique({ where: { id } });
-}
+if (isCoolifyProxyUsed) {
-}
+if (engine || remoteVerified) {
-}
+const { stdout: found } = await executeCommand({
-await prisma.destinationDocker.delete({ where: { id } });
+dockerId: id,
-return {}
+command: `docker ps -a --filter network=${network} --filter name=coolify-proxy --format '{{.}}'`
-} catch ({ status, message }) {
+});
-return errorHandler({ status, message })
+if (found) {
-}
+await executeCommand({
+dockerId: id,
+command: `docker network disconnect ${network} coolify-proxy`
+});
+await executeCommand({ dockerId: id, command: `docker network rm ${network}` });
+}
+}
+}
+await prisma.destinationDocker.delete({ where: { id } });
+return {};
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
export async function saveDestinationSettings(request: FastifyRequest<SaveDestinationSettings>) {
try {
const { engine, isCoolifyProxyUsed } = request.body;
await prisma.destinationDocker.updateMany({
where: { engine },
data: { isCoolifyProxyUsed }
});

return {
status: 202
-}
+};
// return reply.code(201).send();
} catch ({ status, message }) {
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
export async function startProxy(request: FastifyRequest<Proxy>) {
-const { id } = request.params
+const { id } = request.params;
try {
await startTraefikProxy(id);
-return {}
+return {};
} catch ({ status, message }) {
await stopTraefikProxy(id);
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
export async function stopProxy(request: FastifyRequest<Proxy>) {
-const { id } = request.params
+const { id } = request.params;
try {
await stopTraefikProxy(id);
-return {}
+return {};
} catch ({ status, message }) {
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
export async function restartProxy(request: FastifyRequest<Proxy>) {
-const { id } = request.params
+const { id } = request.params;
try {
await stopTraefikProxy(id);
await startTraefikProxy(id);
await prisma.destinationDocker.update({
where: { id },
data: { isCoolifyProxyUsed: true }
});
-return {}
+return {};
} catch ({ status, message }) {
await prisma.destinationDocker.update({
where: { id },
data: { isCoolifyProxyUsed: false }
});
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}

export async function assignSSHKey(request: FastifyRequest) {
try {
const { id: sshKeyId } = request.body;
const { id } = request.params;
-await prisma.destinationDocker.update({ where: { id }, data: { sshKey: { connect: { id: sshKeyId } } } })
+await prisma.destinationDocker.update({
-return {}
+where: { id },
-} catch ({ status, message }) {
+data: { sshKey: { connect: { id: sshKeyId } } }
-return errorHandler({ status, message })
+});
-}
+return {};
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
export async function verifyRemoteDockerEngineFn(id: string) {
-const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst({ where: { id } })
+const { remoteIpAddress, network, isCoolifyProxyUsed } = await prisma.destinationDocker.findFirst(
-const daemonJson = `daemon-${id}.json`
+{ where: { id } }
-try {
+);
-await executeCommand({ sshCommand: true, command: `docker network inspect ${network}`, dockerId: id });
+const daemonJson = `daemon-${id}.json`;
-} catch (error) {
+try {
-await executeCommand({ command: `docker network create --attachable ${network}`, dockerId: id });
+await executeCommand({
-}
+sshCommand: true,
+command: `docker network inspect ${network}`,
+dockerId: id
+});
+} catch (error) {
+await executeCommand({
+command: `docker network create --attachable ${network}`,
+dockerId: id
+});
+}

try {
-await executeCommand({ sshCommand: true, command: `docker network inspect coolify-infra`, dockerId: id });
+await executeCommand({
-} catch (error) {
+sshCommand: true,
-await executeCommand({ command: `docker network create --attachable coolify-infra`, dockerId: id });
+command: `docker network inspect coolify-infra`,
-}
+dockerId: id
+});
+} catch (error) {
+await executeCommand({
+command: `docker network create --attachable coolify-infra`,
+dockerId: id
+});
+}

if (isCoolifyProxyUsed) await startTraefikProxy(id);
let isUpdated = false;
let daemonJsonParsed = {
-"live-restore": true,
+'live-restore': true,
-"features": {
+features: {
-"buildkit": true
+buildkit: true
}
};
try {
-const { stdout: daemonJson } = await executeCommand({ sshCommand: true, dockerId: id, command: `cat /etc/docker/daemon.json` });
+const { stdout: daemonJson } = await executeCommand({
-daemonJsonParsed = JSON.parse(daemonJson);
+sshCommand: true,
-if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
+dockerId: id,
-isUpdated = true;
+command: `cat /etc/docker/daemon.json`
-daemonJsonParsed['live-restore'] = true
+});
+daemonJsonParsed = JSON.parse(daemonJson);
-}
+if (!daemonJsonParsed['live-restore'] || daemonJsonParsed['live-restore'] !== true) {
-if (!daemonJsonParsed?.features?.buildkit) {
+isUpdated = true;
-isUpdated = true;
+daemonJsonParsed['live-restore'] = true;
-daemonJsonParsed.features = {
+}
-buildkit: true
+if (!daemonJsonParsed?.features?.buildkit) {
-}
+isUpdated = true;
-}
+daemonJsonParsed.features = {
-} catch (error) {
+buildkit: true
-isUpdated = true;
+};
-}
+}
-try {
+} catch (error) {
-if (isUpdated) {
+isUpdated = true;
-await executeCommand({ shell: true, command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}` })
+}
-await executeCommand({ dockerId: id, command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json` });
+try {
-await executeCommand({ command: `rm /tmp/${daemonJson}` })
+if (isUpdated) {
-await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
+await executeCommand({
-}
+shell: true,
-await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } })
+command: `echo '${JSON.stringify(daemonJsonParsed, null, 2)}' > /tmp/${daemonJson}`
-} catch (error) {
+});
-throw new Error('Error while verifying remote docker engine')
+await executeCommand({
-}
+dockerId: id,
+command: `scp /tmp/${daemonJson} ${remoteIpAddress}-remote:/etc/docker/daemon.json`
+});
+await executeCommand({ command: `rm /tmp/${daemonJson}` });
+await executeCommand({ sshCommand: true, dockerId: id, command: `systemctl restart docker` });
+}
+await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } });
+} catch (error) {
+throw new Error('Error while verifying remote docker engine');
+}
}
-export async function verifyRemoteDockerEngine(request: FastifyRequest<OnlyId>, reply: FastifyReply) {
+export async function verifyRemoteDockerEngine(
-const { id } = request.params;
+request: FastifyRequest<OnlyId>,
-try {
+reply: FastifyReply
-await verifyRemoteDockerEngineFn(id);
+) {
-return reply.code(201).send()
+const { id } = request.params;
-} catch ({ status, message }) {
+try {
-await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: false } })
+await verifyRemoteDockerEngineFn(id);
-return errorHandler({ status, message })
+return reply.code(201).send();
-}
+} catch ({ status, message }) {
+await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: false } });
+return errorHandler({ status, message });
+}
}

export async function getDestinationStatus(request: FastifyRequest<OnlyId>) {
try {
-const { id } = request.params
+const { id } = request.params;
-const destination = await prisma.destinationDocker.findUnique({ where: { id } })
+const destination = await prisma.destinationDocker.findUnique({ where: { id } });
-const { found: isRunning } = await checkContainer({ dockerId: destination.id, container: 'coolify-proxy', remove: true })
+const { found: isRunning } = await checkContainer({
-return {
+dockerId: destination.id,
-isRunning
+container: 'coolify-proxy',
-}
+remove: true
-} catch ({ status, message }) {
+});
-return errorHandler({ status, message })
+return {
-}
+isRunning
+};
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
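verifyRemoteDockerEngineFn reads /etc/docker/daemon.json over SSH, enables live-restore and BuildKit if they are missing, and only rewrites the file and restarts Docker when something actually changed. A small hedged sketch of that merge step in isolation (types and the null-handling are assumptions for the sketch):

// Sketch of the daemon.json merge logic; illustrative only.
type DaemonJson = { 'live-restore'?: boolean; features?: { buildkit?: boolean } };

function ensureDaemonSettings(raw: string | null): { updated: boolean; config: DaemonJson } {
	let config: DaemonJson = { 'live-restore': true, features: { buildkit: true } };
	let updated = false;
	try {
		config = JSON.parse(raw ?? '');
	} catch {
		// Missing or invalid file: fall back to the defaults above and mark for rewrite.
		return { updated: true, config };
	}
	if (config['live-restore'] !== true) {
		config['live-restore'] = true;
		updated = true;
	}
	if (!config.features?.buildkit) {
		config.features = { ...config.features, buildkit: true };
		updated = true;
	}
	return { updated, config };
}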
@@ -156,14 +156,21 @@ export async function update(request: FastifyRequest<Update>) {
try {
if (!isDev) {
const { isAutoUpdateEnabled } = await prisma.setting.findFirst();
-await executeCommand({ command: `docker pull coollabsio/coolify:${latestVersion}` });
+let image = `ghcr.io/coollabsio/coolify:${latestVersion}`;
+try {
+await executeCommand({ command: `docker pull ${image}` });
+} catch (error) {
+image = `coollabsio/coolify:${latestVersion}`;
+await executeCommand({ command: `docker pull ${image}` });
+}

await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` });
await executeCommand({
command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env`
});
await executeCommand({
shell: true,
-command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db coollabsio/coolify:${latestVersion} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
+command: `docker run --rm -tid --env-file .env -v /var/run/docker.sock:/var/run/docker.sock -v coolify-db ${image} /bin/sh -c "env | grep COOLIFY > .env && echo 'TAG=${latestVersion}' >> .env && docker stop -t 0 coolify coolify-fluentbit && docker rm coolify coolify-fluentbit && docker compose pull && docker compose up -d --force-recreate"`
});
return {};
} else {
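The updater now prefers the GitHub Container Registry image and falls back to Docker Hub if that pull fails, then reuses whichever tag succeeded for the upgrade container. A hedged standalone sketch of that fallback (the executeCommand parameter stands in for the project's helper and is assumed to reject on a non-zero exit):

// Sketch of the registry fallback; not the handler itself.
async function pullCoolifyImage(
	latestVersion: string,
	executeCommand: (opts: { command: string }) => Promise<unknown>
): Promise<string> {
	let image = `ghcr.io/coollabsio/coolify:${latestVersion}`;
	try {
		await executeCommand({ command: `docker pull ${image}` });
	} catch {
		// ghcr.io unreachable or tag missing there: retry against Docker Hub.
		image = `coollabsio/coolify:${latestVersion}`;
		await executeCommand({ command: `docker pull ${image}` });
	}
	return image; // the tag that was actually pulled
}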
@@ -1,191 +1,231 @@
import cuid from 'cuid';
import type { FastifyRequest } from 'fastify';
-import { FastifyReply } from 'fastify';
import { decrypt, encrypt, errorHandler, prisma } from '../../../../lib/common';
import { OnlyId } from '../../../../types';
import { CheckGitLabOAuthId, SaveGitHubSource, SaveGitLabSource } from './types';

export async function listSources(request: FastifyRequest) {
try {
const teamId = request.user?.teamId;
const sources = await prisma.gitSource.findMany({
-where: { OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
+where: {
-include: { teams: true, githubApp: true, gitlabApp: true }
+OR: [
-});
+{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
-return {
+{ isSystemWide: true }
-sources
+]
-}
+},
-} catch ({ status, message }) {
+include: { teams: true, githubApp: true, gitlabApp: true }
-return errorHandler({ status, message })
+});
-}
+return {
+sources
+};
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
export async function saveSource(request, reply) {
try {
-const { id } = request.params
+const { id } = request.params;
-let { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = request.body
+let { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide } = request.body;
-if (customPort) customPort = Number(customPort)
+if (customPort) customPort = Number(customPort);
await prisma.gitSource.update({
where: { id },
data: { name, htmlUrl, apiUrl, customPort, customUser, isSystemWide }
});
-return reply.code(201).send()
+return reply.code(201).send();
} catch ({ status, message }) {
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
export async function getSource(request: FastifyRequest<OnlyId>) {
try {
-const { id } = request.params
+const { id } = request.params;
-const { teamId } = request.user
+const { teamId } = request.user;
const settings = await prisma.setting.findFirst({});

if (id === 'new') {
return {
source: {
name: null,
type: null,
htmlUrl: null,
apiUrl: null,
organization: null,
customPort: 22,
-customUser: 'git',
+customUser: 'git'
},
settings
-}
+};
}

const source = await prisma.gitSource.findFirst({
-where: { id, OR: [{ teams: { some: { id: teamId === "0" ? undefined : teamId } } }, { isSystemWide: true }] },
+where: {
-include: { githubApp: true, gitlabApp: true }
+id,
-});
+OR: [
-if (!source) {
+{ teams: { some: { id: teamId === '0' ? undefined : teamId } } },
-throw { status: 404, message: 'Source not found.' }
+{ isSystemWide: true }
-}
+]
+},
+include: { githubApp: true, gitlabApp: true }
+});
+if (!source) {
+throw { status: 404, message: 'Source not found.' };
+}

if (source?.githubApp?.clientSecret)
source.githubApp.clientSecret = decrypt(source.githubApp.clientSecret);
if (source?.githubApp?.webhookSecret)
source.githubApp.webhookSecret = decrypt(source.githubApp.webhookSecret);
-if (source?.githubApp?.privateKey) source.githubApp.privateKey = decrypt(source.githubApp.privateKey);
+if (source?.githubApp?.privateKey)
-if (source?.gitlabApp?.appSecret) source.gitlabApp.appSecret = decrypt(source.gitlabApp.appSecret);
+source.githubApp.privateKey = decrypt(source.githubApp.privateKey);
+if (source?.gitlabApp?.appSecret)
+source.gitlabApp.appSecret = decrypt(source.gitlabApp.appSecret);

return {
source,
settings
};
+} catch ({ status, message }) {
-} catch ({ status, message }) {
+return errorHandler({ status, message });
-return errorHandler({ status, message })
+}
-}
}

export async function deleteSource(request) {
try {
-const { id } = request.params
+const { id } = request.params;
-const source = await prisma.gitSource.delete({
+const gitAppFound = await prisma.application.findFirst({ where: { gitSourceId: id } });
-where: { id },
+if (gitAppFound) {
-include: { githubApp: true, gitlabApp: true }
+throw {
-});
+status: 400,
-if (source.githubAppId) {
+message: 'This source is used by an application. Please remove the application first.'
-await prisma.githubApp.delete({ where: { id: source.githubAppId } });
+};
-}
+}
-if (source.gitlabAppId) {
+const source = await prisma.gitSource.delete({
-await prisma.gitlabApp.delete({ where: { id: source.gitlabAppId } });
+where: { id },
-}
+include: { githubApp: true, gitlabApp: true }
-return {}
+});
-} catch ({ status, message }) {
+if (source.githubAppId) {
-return errorHandler({ status, message })
+await prisma.githubApp.delete({ where: { id: source.githubAppId } });
-}
+}
+if (source.gitlabAppId) {
+await prisma.gitlabApp.delete({ where: { id: source.gitlabAppId } });
+}
+return {};
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}
export async function saveGitHubSource(request: FastifyRequest<SaveGitHubSource>) {
try {
-const { teamId } = request.user
+const { teamId } = request.user;

-const { id } = request.params
+const { id } = request.params;
-let { name, htmlUrl, apiUrl, organization, customPort, isSystemWide } = request.body
+let { name, htmlUrl, apiUrl, organization, customPort, isSystemWide } = request.body;

-if (customPort) customPort = Number(customPort)
+if (customPort) customPort = Number(customPort);
if (id === 'new') {
-const newId = cuid()
+const newId = cuid();
await prisma.gitSource.create({
data: {
id: newId,
name,
htmlUrl,
apiUrl,
organization,
customPort,
isSystemWide,
type: 'github',
teams: { connect: { id: teamId } }
}
});
return {
id: newId
-}
+};
}
-throw { status: 500, message: 'Wrong request.' }
+throw { status: 500, message: 'Wrong request.' };
} catch ({ status, message }) {
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
export async function saveGitLabSource(request: FastifyRequest<SaveGitLabSource>) {
try {
-const { id } = request.params
+const { id } = request.params;
-const { teamId } = request.user
+const { teamId } = request.user;
-let { type, name, htmlUrl, apiUrl, oauthId, appId, appSecret, groupName, customPort, customUser } =
+let {
-request.body
+type,
+name,
+htmlUrl,
+apiUrl,
+oauthId,
+appId,
+appSecret,
+groupName,
+customPort,
+customUser
+} = request.body;

if (oauthId) oauthId = Number(oauthId);
-if (customPort) customPort = Number(customPort)
+if (customPort) customPort = Number(customPort);
const encryptedAppSecret = encrypt(appSecret);

if (id === 'new') {
-const newId = cuid()
+const newId = cuid();
-await prisma.gitSource.create({ data: { id: newId, type, apiUrl, htmlUrl, name, customPort, customUser, teams: { connect: { id: teamId } } } });
+await prisma.gitSource.create({
-await prisma.gitlabApp.create({
+data: {
-data: {
+id: newId,
-teams: { connect: { id: teamId } },
+type,
-appId,
+apiUrl,
-oauthId,
+htmlUrl,
-groupName,
+name,
-appSecret: encryptedAppSecret,
+customPort,
-gitSource: { connect: { id: newId } }
+customUser,
-}
+teams: { connect: { id: teamId } }
-});
+}
-return {
+});
-status: 201,
+await prisma.gitlabApp.create({
-id: newId
+data: {
-}
+teams: { connect: { id: teamId } },
-} else {
+appId,
-await prisma.gitSource.update({ where: { id }, data: { type, apiUrl, htmlUrl, name, customPort, customUser } });
+oauthId,
-await prisma.gitlabApp.update({
+groupName,
-where: { id },
+appSecret: encryptedAppSecret,
-data: {
+gitSource: { connect: { id: newId } }
-appId,
+}
-oauthId,
+});
-groupName,
+return {
-appSecret: encryptedAppSecret,
+status: 201,
-}
+id: newId
-});
+};
-}
+} else {
-return { status: 201 };
+await prisma.gitSource.update({
+where: { id },
-} catch ({ status, message }) {
+data: { type, apiUrl, htmlUrl, name, customPort, customUser }
-return errorHandler({ status, message })
+});
-}
+await prisma.gitlabApp.update({
+where: { id },
+data: {
+appId,
+oauthId,
+groupName,
+appSecret: encryptedAppSecret
+}
+});
+}
+return { status: 201 };
+} catch ({ status, message }) {
+return errorHandler({ status, message });
+}
}

export async function checkGitLabOAuthID(request: FastifyRequest<CheckGitLabOAuthId>) {
try {
-const { oauthId } = request.body
+const { oauthId } = request.body;
const found = await prisma.gitlabApp.findFirst({ where: { oauthId: Number(oauthId) } });
if (found) {
-throw { status: 500, message: 'OAuthID already configured in Coolify.' }
+throw { status: 500, message: 'OAuthID already configured in Coolify.' };
}
-return {}
+return {};
} catch ({ status, message }) {
-return errorHandler({ status, message })
+return errorHandler({ status, message });
}
}
@@ -543,6 +543,9 @@ export async function proxyConfiguration(request: FastifyRequest<OnlyId>, remote
		const template: any = await parseAndFindServiceTemplates(service, null, true);
		const { proxy } = template.services[oneService] || found.services[oneService];
		for (let configuration of proxy) {
+			if (configuration.hostPort) {
+				continue;
+			}
			if (configuration.domain) {
				const setting = serviceSetting.find(
					(a) => a.variableName === configuration.domain
File diff suppressed because one or more lines are too long
@@ -12,6 +12,7 @@
	import { errorNotification } from '$lib/common';
	import { addToast } from '$lib/store';
	import CopyVolumeField from '$lib/components/CopyVolumeField.svelte';
+	import SimpleExplainer from '$lib/components/SimpleExplainer.svelte';
	const { id } = $page.params;
	let isHttps = browser && window.location.protocol === 'https:';
	export let value: string;
@@ -33,11 +34,13 @@
		storage.path.replace(/\/\//g, '/');
		await post(`/applications/${id}/storages`, {
			path: storage.path,
+			hostPath: storage.hostPath,
			storageId: storage.id,
			newStorage
		});
		dispatch('refresh');
		if (isNew) {
+			storage.hostPath = null;
			storage.path = null;
			storage.id = null;
		}
@@ -80,27 +83,42 @@
<div class="flex gap-4 pb-2" class:pt-8={isNew}>
	{#if storage.applicationId}
		{#if storage.oldPath}
			<CopyVolumeField
				value="{storage.applicationId}{storage.path.replace(/\//gi, '-').replace('-app', '')}"
			/>
-		{:else}
+		{:else if !storage.hostPath}
			<CopyVolumeField
				value="{storage.applicationId}{storage.path.replace(/\//gi, '-').replace('-app', '')}"
			/>
		{/if}
	{/if}

+	{#if isNew}
+		<div class="w-full">
+			<input
+				disabled={!isNew}
+				readonly={!isNew}
+				bind:value={storage.hostPath}
+				placeholder="Host path, example: ~/.directory"
+			/>
+			<SimpleExplainer
+				text="You can mount <span class='text-yellow-400 font-bold'>host paths</span> from the operating system.<br>Leave it empty to define a volume based volume."
+			/>
+		</div>
+	{:else if storage.hostPath}
+		<input disabled readonly value={storage.hostPath} />
+	{/if}
	<input
		disabled={!isNew}
		readonly={!isNew}
		class="w-full"
		bind:value={storage.path}
		required
-		placeholder="eg: /data"
+		placeholder="Mount point inside the container, example: /data"
	/>

-	<div class="flex items-center justify-center">
+	<div class="flex items-start justify-center">
		{#if isNew}
			<div class="w-full lg:w-64">
				<button class="btn btn-sm btn-primary w-full" on:click={() => saveStorage(true)}
@@ -427,29 +427,6 @@
			</svg> Stop
		</button>
	{:else if $isDeploymentEnabled && !$page.url.pathname.startsWith(`/applications/${id}/configuration/`)}
-		{#if $status.application.overallStatus === 'degraded'}
-			<button
-				on:click={stopApplication}
-				type="submit"
-				disabled={!$isDeploymentEnabled || !$appSession.isAdmin}
-				class="btn btn-sm gap-2"
-			>
-				<svg
-					xmlns="http://www.w3.org/2000/svg"
-					class="w-6 h-6 text-error"
-					viewBox="0 0 24 24"
-					stroke-width="1.5"
-					stroke="currentColor"
-					fill="none"
-					stroke-linecap="round"
-					stroke-linejoin="round"
-				>
-					<path stroke="none" d="M0 0h24v24H0z" fill="none" />
-					<rect x="6" y="5" width="4" height="14" rx="1" />
-					<rect x="14" y="5" width="4" height="14" rx="1" />
-				</svg> Stop
-			</button>
-		{/if}
		<button
			class="btn btn-sm gap-2"
			disabled={!$isDeploymentEnabled || !$appSession.isAdmin}
@@ -493,6 +470,29 @@
				: 'Redeploy Stack'
				: 'Deploy'}
		</button>
+		{#if $status.application.overallStatus === 'degraded'}
+			<button
+				on:click={stopApplication}
+				type="submit"
+				disabled={!$isDeploymentEnabled || !$appSession.isAdmin}
+				class="btn btn-sm gap-2"
+			>
+				<svg
+					xmlns="http://www.w3.org/2000/svg"
+					class="w-6 h-6 text-error"
+					viewBox="0 0 24 24"
+					stroke-width="1.5"
+					stroke="currentColor"
+					fill="none"
+					stroke-linecap="round"
+					stroke-linejoin="round"
+				>
+					<path stroke="none" d="M0 0h24v24H0z" fill="none" />
+					<rect x="6" y="5" width="4" height="14" rx="1" />
+					<rect x="14" y="5" width="4" height="14" rx="1" />
+				</svg> Stop
+			</button>
+		{/if}
	{/if}
	{#if $location && $status.application.overallStatus === 'healthy'}
		<a href={$location} target="_blank noreferrer" class="btn btn-sm gap-2 text-sm bg-primary"
@@ -158,7 +158,7 @@
	id="dockerImage"
	name="dockerImage"
	required
-	placeholder="coollabsio/coolify:0.0.1"
+	placeholder="ghcr.io/coollabsio/coolify:0.0.1"
	bind:value={remoteImage}
/>
<button class="btn btn-sm btn-primary" type="submit">Revert Now</button>
@@ -35,17 +35,46 @@
	for (const [_, service] of Object.entries(composeJson.services)) {
		if (service?.volumes) {
			for (const [_, volumeName] of Object.entries(service.volumes)) {
-				let [volume, target] = volumeName.split(':');
-				if (volume === '.') {
-					volume = target;
-				}
-				if (!target) {
-					target = volume;
-					volume = `${application.id}${volume.replace(/\//gi, '-').replace(/\./gi, '')}`;
-				} else {
-					volume = `${application.id}${volume.replace(/\//gi, '-').replace(/\./gi, '')}`;
-				}
-				predefinedVolumes.push({ id: volume, path: target, predefined: true });
+				if (typeof volumeName === 'string') {
+					let [volume, target] = volumeName.split(':');
+					if (
+						volume.startsWith('.') ||
+						volume.startsWith('..') ||
+						volume.startsWith('/') ||
+						volume.startsWith('~') ||
+						volume.startsWith('$PWD')
+					) {
+						volume = volume.replace(/^\./, `~`).replace(/^\.\./, '~').replace(/^\$PWD/, '~');
+					} else {
+						if (!target) {
+							target = volume;
+							volume = `${application.id}${volume.replace(/\//gi, '-').replace(/\./gi, '')}`;
+						} else {
+							volume = `${application.id}${volume.replace(/\//gi, '-').replace(/\./gi, '')}`;
+						}
+					}
+					predefinedVolumes.push({ id: volume, path: target, predefined: true });
+				}
+				if (typeof volumeName === 'object') {
+					let { source, target } = volumeName;
+					if (
+						source.startsWith('.') ||
+						source.startsWith('..') ||
+						source.startsWith('/') ||
+						source.startsWith('~') ||
+						source.startsWith('$PWD')
+					) {
+						source = source.replace(/^\./, `~`).replace(/^\.\./, '~').replace(/^\$PWD/, '~');
+					} else {
+						if (!target) {
+							target = source;
+							source = `${application.id}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
+						} else {
+							source = `${application.id}${source.replace(/\//gi, '-').replace(/\./gi, '')}`;
+						}
+					}
+					predefinedVolumes.push({ id: source, path: target, predefined: true });
+				}
			}
		}
	}
@@ -88,14 +117,14 @@
			{/key}
		{/each}
		{#if $appSession.isAdmin}
			<div class:pt-10={predefinedVolumes.length > 0}>
				Add New Volume <Explainer
					position="dropdown-bottom"
					explanation={$t('application.storage.persistent_storage_explainer')}
				/>
			</div>

			<Storage on:refresh={refreshStorage} isNew />
		{/if}
	</div>
</div>
@@ -49,23 +49,23 @@
			databaseDbUser = '';
		}
	}
-	function generateUrl() {
-		const ipAddress = () => {
-			if ($status.database.isPublic) {
-				if (database.destinationDocker.remoteEngine) {
-					return database.destinationDocker.remoteIpAddress;
-				}
-				if ($appSession.ipv6) {
-					return $appSession.ipv6;
-				}
-				if ($appSession.ipv4) {
-					return $appSession.ipv4;
-				}
-				return '<Cannot determine public IP address>';
-			} else {
-				return database.id;
-			}
-		};
+	function ipAddress() {
+		if ($status.database.isPublic) {
+			if (database.destinationDocker.remoteEngine) {
+				return database.destinationDocker.remoteIpAddress;
+			}
+			if ($appSession.ipv6) {
+				return $appSession.ipv6;
+			}
+			if ($appSession.ipv4) {
+				return $appSession.ipv4;
+			}
+			return '<Cannot determine public IP address>';
+		} else {
+			return database.id;
+		}
+	}
+	function generateUrl() {
		const user = () => {
			if (databaseDbUser) {
				return databaseDbUser + ':';
@@ -183,16 +183,38 @@
				class:cursor-pointer={!$status.database.isRunning}
			/></a
		>
-		<label for="host">{$t('forms.host')}</label>
-		<CopyPasswordField
-			placeholder={$t('forms.generated_automatically_after_start')}
-			isPasswordField={false}
-			readonly
-			disabled
-			id="host"
-			name="host"
-			value={database.id}
-		/>
+		{#if $status.database.isPublic}
+			<label for="internalHost">Internal Host</label>
+			<CopyPasswordField
+				isPasswordField={false}
+				readonly
+				disabled
+				id="internalHost"
+				name="internalHost"
+				value={database.id}
+			/>
+			<label for="host">Public Host</label>
+			<CopyPasswordField
+				placeholder={$t('forms.generated_automatically_after_start')}
+				isPasswordField={false}
+				readonly
+				disabled
+				id="host"
+				name="host"
+				value={loading.public ? 'Loading...' : ipAddress()}
+			/>
+		{:else}
+			<label for="internalHost">Host</label>
+			<CopyPasswordField
+				isPasswordField={false}
+				readonly
+				disabled
+				id="internalHost"
+				name="internalHost"
+				value={database.id}
+			/>
+		{/if}

		<label for="publicPort">{$t('forms.port')}</label>
		<CopyPasswordField
			placeholder={$t('database.generated_automatically_after_set_to_public')}
@@ -34,7 +34,7 @@ services:
    networks:
      - coolify-infra
  fluent-bit:
-    image: coollabsio/coolify-fluent-bit:1.0.0
+    image: ghcr.io/coollabsio/fluent-bit:1.0.0
    command: /fluent-bit/bin/fluent-bit -c /fluent-bit/etc/fluent-bit-dev.conf
    container_name: coolify-fluentbit
    volumes:
@@ -2,7 +2,7 @@ version: '3.8'

services:
  coolify:
-    image: coollabsio/coolify:${TAG:-latest}
+    image: ghcr.io/coollabsio/coolify:${TAG:-latest}
    restart: always
    container_name: coolify
    ports:
@@ -23,7 +23,7 @@ services:
    networks:
      - coolify-infra
  fluent-bit:
-    image: coollabsio/coolify-fluent-bit:1.0.0
+    image: ghcr.io/coollabsio/fluent-bit:1.0.0
    container_name: coolify-fluentbit
    volumes:
      - 'coolify-logs:/app/logs'
@@ -1,4 +0,0 @@
-FROM fluent/fluent-bit:1.9.8
-COPY ./fluent-bit.conf /fluent-bit/etc/fluent-bit.conf
-COPY ./fluent-bit-dev.conf /fluent-bit/etc/fluent-bit-dev.conf
-COPY ./parsers.conf /fluent-bit/etc/parsers.conf
@@ -1,30 +0,0 @@
-[SERVICE]
-    Parsers_file /fluent-bit/etc/parsers.conf
-    Flush 1
-    Grace 30
-[INPUT]
-    Name http
-    Host 0.0.0.0
-    Port 24224
-[FILTER]
-    Name parser
-    Match *
-    Key_Name log
-    Parser jsonparser
-    Reserve_Data True
-[OUTPUT]
-    Name file
-    Match *
-    Path /logs
-    Mkdir true
-    Format csv
-# [OUTPUT]
-#     Name influxdb
-#     match *
-#     Host coolify-influxdb
-#     Port 8086
-#     Database coolify
-#     Bucket coolify
-#     Org coolify
-#     HTTP_Token 12345678
-#     Sequence_Tag _seq
@@ -1,30 +0,0 @@
-[SERVICE]
-    Parsers_file /fluent-bit/etc/parsers.conf
-    Flush 1
-    Grace 30
-[INPUT]
-    Name http
-    Host 0.0.0.0
-    Port 24224
-[FILTER]
-    Name parser
-    Match *
-    Key_Name log
-    Parser jsonparser
-    Reserve_Data True
-[OUTPUT]
-    Name file
-    Match *
-    Path /app/logs
-    Mkdir true
-    Format csv
-# [OUTPUT]
-#     Name influxdb
-#     match *
-#     Host coolify-influxdb
-#     Port 8086
-#     Database coolify
-#     Bucket coolify
-#     Org coolify
-#     HTTP_Token 12345678
-#     Sequence_Tag _seq
@@ -1,6 +0,0 @@
-[PARSER]
-    Name jsonparser
-    Format json
-    Time_Key time
-    Time_Format %Y-%m-%dT%H:%M:%S.%L
-    Time_Keep On
@@ -1,12 +0,0 @@
-FROM alpine:3.17
-ARG BUILDARCH
-ARG PB_VERSION=0.12.3
-RUN apk add --no-cache \
-    unzip \
-    ca-certificates
-
-ADD https://github.com/pocketbase/pocketbase/releases/download/v${PB_VERSION}/pocketbase_${PB_VERSION}_linux_${BUILDARCH}.zip /tmp/pb.zip
-RUN unzip /tmp/pb.zip -d /app/
-RUN rm /tmp/pb.zip
-EXPOSE 8080
-CMD ["/app/pocketbase", "serve", "--http=0.0.0.0:8080"]
@@ -1,7 +1,7 @@
{
  "name": "coolify",
  "description": "An open-source & self-hostable Heroku / Netlify alternative.",
-  "version": "3.12.24",
+  "version": "3.12.30",
  "license": "Apache-2.0",
  "repository": "github:coollabsio/coolify",
  "scripts": {
@@ -32,7 +32,7 @@
    "build:api": "NODE_ENV=production pnpm run --filter api build",
    "build:ui": "NODE_ENV=production pnpm run --filter ui build",
    "dockerlogin": "echo $DOCKER_PASS | docker login --username=$DOCKER_USER --password-stdin",
-    "release:staging:amd": "docker build -t coollabsio/coolify:next . && docker push coollabsio/coolify:next",
+    "release:staging:amd": "docker build -t ghcr.io/coollabsio/coolify:next . && docker push ghcr.io/coollabsio/coolify:next",
    "release:local": "rm -fr ./local-serve && mkdir ./local-serve && pnpm build && cp -Rp apps/api/build/* ./local-serve && cp -Rp apps/ui/build/ ./local-serve/public && cp -Rp apps/api/prisma/ ./local-serve/prisma && cp -Rp apps/api/package.json ./local-serve && env | grep '^COOLIFY_' > ./local-serve/.env && cd ./local-serve && pnpm install . && pnpm start"
  },
  "devDependencies": {