diff --git a/.github/workflows/production-release-dockerhub.yml b/.github/workflows/production-release-dockerhub.yml deleted file mode 100644 index 1a1adaad4..000000000 --- a/.github/workflows/production-release-dockerhub.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: Production Release to DockerHub - -on: - push: - branches: - - "this-branch-does-not-exists" - -jobs: - arm64: - runs-on: [self-hosted, arm64] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get current package version - uses: martinbeentjes/npm-get-version-action@v1.2.3 - id: package-version - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . - platforms: linux/arm64 - push: true - tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 - cache-from: type=registry,ref=coollabsio/coolify:buildcache-arm64 - cache-to: type=registry,ref=coollabsio/coolify:buildcache-arm64,mode=max - amd64: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Login to DockerHub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get current package version - uses: martinbeentjes/npm-get-version-action@v1.2.3 - id: package-version - - name: Build and push - uses: docker/build-push-action@v3 - with: - context: . - platforms: linux/amd64 - push: true - tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}} - cache-from: type=registry,ref=coollabsio/coolify:buildcache-amd64 - cache-to: type=registry,ref=coollabsio/coolify:buildcache-amd64,mode=max - aarch64: - runs-on: [self-hosted, arm64] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get current package version - uses: martinbeentjes/npm-get-version-action@v1.2.3 - id: package-version - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . 
- platforms: linux/aarch64 - push: true - tags: coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 - cache-from: type=registry,ref=coollabsio/coolify:buildcache-aarch64 - cache-to: type=registry,ref=coollabsio/coolify:buildcache-aarch64,mode=max - merge-manifest: - runs-on: ubuntu-latest - needs: [amd64, arm64, aarch64] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Login to DockerHub - uses: docker/login-action@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get current package version - uses: martinbeentjes/npm-get-version-action@v1.2.3 - id: package-version - - name: Create & publish manifest - run: | - docker buildx imagetools create --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-arm64 --append coollabsio/coolify:${{steps.package-version.outputs.current-version}}-aarch64 --tag coollabsio/coolify:${{steps.package-version.outputs.current-version}} - docker buildx imagetools create coollabsio/coolify:${{steps.package-version.outputs.current-version}} --tag coollabsio/coolify:latest - - uses: sarisia/actions-status-discord@v1 - if: always() - with: - webhook: ${{ secrets.DISCORD_WEBHOOK_PROD_RELEASE_CHANNEL }} diff --git a/.github/workflows/production-release.yml b/.github/workflows/production-release.yml index 1adc0cb47..0ab618968 100644 --- a/.github/workflows/production-release.yml +++ b/.github/workflows/production-release.yml @@ -14,6 +14,8 @@ jobs: steps: - name: Checkout uses: actions/checkout@v3 + with: + ref: "v3" - name: Set up QEMU uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx @@ -44,6 +46,8 @@ jobs: steps: - name: Checkout uses: actions/checkout@v3 + with: + ref: "v3" - name: Set up QEMU uses: docker/setup-qemu-action@v1 - name: Set up Docker Buildx @@ -95,7 +99,6 @@ jobs: - name: Create & publish manifest run: | docker buildx imagetools create --append ${{ fromJSON(steps.meta.outputs.json).tags[0] }}-aarch64 --tag ${{ fromJSON(steps.meta.outputs.json).tags[0] }} - docker buildx imagetools create --append ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-aarch64 --tag ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest - uses: sarisia/actions-status-discord@v1 if: always() with: diff --git a/.github/workflows/release-candidate.yml b/.github/workflows/release-candidate.yml deleted file mode 100644 index e69dd009c..000000000 --- a/.github/workflows/release-candidate.yml +++ /dev/null @@ -1,110 +0,0 @@ -name: Release Candidate to ghcr.io - -on: - release: - types: [prereleased] - -env: - REGISTRY: ghcr.io - IMAGE_NAME: "coollabsio/coolify" - -jobs: - amd64: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Login to ghcr.io - uses: docker/login-action@v2 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Extract metadata - id: meta - uses: docker/metadata-action@v4 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - - name: Build and push - uses: docker/build-push-action@v3 - with: - context: . 
- platforms: linux/amd64 - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - aarch64: - runs-on: [self-hosted, arm64] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to ghcr.io - uses: docker/login-action@v2 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Extract metadata - id: meta - uses: docker/metadata-action@v4 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - - name: Build and push - uses: docker/build-push-action@v3 - with: - context: . - platforms: linux/aarch64 - push: true - tags: ${{ steps.meta.outputs.tags }}-aarch64 - labels: ${{ steps.meta.outputs.labels }} - merge-manifest: - runs-on: ubuntu-latest - needs: [amd64, aarch64] - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Login to ghcr.io - uses: docker/login-action@v2 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Extract metadata - id: meta - uses: docker/metadata-action@v4 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - - name: Create & publish manifest - run: | - docker buildx imagetools create --append ${{ steps.meta.outputs.tags }}-aarch64 --tag ${{ steps.meta.outputs.tags }} - - uses: sarisia/actions-status-discord@v1 - if: always() - with: - webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }} diff --git a/.github/workflows/staging-release-dockerhub.yml b/.github/workflows/staging-release-dockerhub.yml deleted file mode 100644 index 111cd54a0..000000000 --- a/.github/workflows/staging-release-dockerhub.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Staging Release to DockerHub - -on: - push: - branches: - - "this-branch-does-not-exists" - -jobs: - arm64: - runs-on: [self-hosted, arm64] - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - ref: "next" - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Login to DockerHub - uses: docker/login-action@v1 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Get current package version - uses: martinbeentjes/npm-get-version-action@v1.2.3 - id: package-version - - name: Build and push - uses: docker/build-push-action@v2 - with: - context: . 
-          platforms: linux/arm64
-          push: true
-          tags: coollabsio/coolify:next-arm64
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-arm64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-arm64,mode=max
-  amd64:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          ref: "next"
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Get current package version
-        uses: martinbeentjes/npm-get-version-action@v1.2.3
-        id: package-version
-      - name: Build and push
-        uses: docker/build-push-action@v3
-        with:
-          context: .
-          platforms: linux/amd64
-          push: true
-          tags: coollabsio/coolify:next
-          cache-from: type=registry,ref=coollabsio/coolify:buildcache-next-amd64
-          cache-to: type=registry,ref=coollabsio/coolify:buildcache-next-amd64,mode=max
-  merge-manifest:
-    runs-on: ubuntu-latest
-    needs: [arm64, amd64]
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Create & publish manifest
-        run: |
-          docker buildx imagetools create --append coollabsio/coolify:next-arm64 --tag coollabsio/coolify:next
-      - uses: sarisia/actions-status-discord@v1
-        if: always()
-        with:
-          webhook: ${{ secrets.DISCORD_WEBHOOK_DEV_RELEASE_CHANNEL }}
diff --git a/.github/workflows/staging-release.yml b/.github/workflows/staging-release.yml
index dfd96e9bb..09248632d 100644
--- a/.github/workflows/staging-release.yml
+++ b/.github/workflows/staging-release.yml
@@ -18,7 +18,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v3
         with:
-          ref: "next"
+          ref: "v3"
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
       - name: Login to ghcr.io
@@ -47,7 +47,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v3
         with:
-          ref: "next"
+          ref: "v3"
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
       - name: Login to ghcr.io
diff --git a/Dockerfile b/Dockerfile
index 80f56f06a..e45a29852 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -24,7 +24,7 @@ ARG DOCKER_COMPOSE_VERSION=2.6.1
 # https://github.com/buildpacks/pack/releases
 ARG PACK_VERSION=0.27.0
 
-RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3
+RUN apt update && apt -y install --no-install-recommends ca-certificates git git-lfs openssh-client curl jq cmake sqlite3 openssl psmisc python3 vim
 RUN apt-get clean autoclean && apt-get autoremove --yes && rm -rf /var/lib/{apt,dpkg,cache,log}/
 RUN npm --no-update-notifier --no-fund --global install pnpm@${PNPM_VERSION}
 RUN npm install -g npm@${PNPM_VERSION}
diff --git a/README.md b/README.md
index 56b458595..bf54462bd 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ If you have a new service / build pack you would like to add, raise an idea [her
 
 ## How to install
 
-For more details goto the [docs](https://docs.coollabs.io/coolify/installation).
+For more details goto the [docs](https://docs.coollabs.io/coolify-v3/installation).
 
 Installation is automated with the following command:
 
@@ -79,9 +79,9 @@ Deploy your resource to:
 ### Services
 
 - [Appwrite](https://appwrite.io)
-- [WordPress](https://docs.coollabs.io/coolify/services/wordpress)
+- [WordPress](https://docs.coollabs.io/coolify-v3/services/wordpress)
 - [Ghost](https://ghost.org)
-- [Plausible Analytics](https://docs.coollabs.io/coolify/services/plausible-analytics)
+- [Plausible Analytics](https://docs.coollabs.io/coolify-v3/services/plausible-analytics)
 - [NocoDB](https://nocodb.com)
 - [VSCode Server](https://github.com/cdr/code-server)
 - [MinIO](https://min.io)
diff --git a/apps/api/package.json b/apps/api/package.json
index 86f78ab39..32b303b4f 100644
--- a/apps/api/package.json
+++ b/apps/api/package.json
@@ -26,8 +26,6 @@
 		"@iarna/toml": "2.2.5",
 		"@ladjs/graceful": "3.2.1",
 		"@prisma/client": "4.8.1",
-		"@sentry/node": "7.30.0",
-		"@sentry/tracing": "7.30.0",
 		"axe": "11.2.1",
 		"bcryptjs": "2.4.3",
 		"bree": "9.1.3",
diff --git a/apps/api/prisma/seed.js b/apps/api/prisma/seed.js
index 3d4b16a28..06affc8a7 100644
--- a/apps/api/prisma/seed.js
+++ b/apps/api/prisma/seed.js
@@ -12,7 +12,7 @@ async function main() {
 		await prisma.setting.create({
 			data: {
 				id: '0',
-				arch: process.arch,
+				arch: process.arch
 			}
 		});
 	} else {
@@ -81,16 +81,246 @@ async function main() {
 		});
 	}
 	// Set new preview secrets
-	const secrets = await prisma.secret.findMany({ where: { isPRMRSecret: false } })
+	const secrets = await prisma.secret.findMany({ where: { isPRMRSecret: false } });
 	if (secrets.length > 0) {
 		for (const secret of secrets) {
-			const previewSecrets = await prisma.secret.findMany({ where: { applicationId: secret.applicationId, name: secret.name, isPRMRSecret: true } })
+			const previewSecrets = await prisma.secret.findMany({
+				where: { applicationId: secret.applicationId, name: secret.name, isPRMRSecret: true }
+			});
 			if (previewSecrets.length === 0) {
-				await prisma.secret.create({ data: { ...secret, id: undefined, isPRMRSecret: true } })
+				await prisma.secret.create({ data: { ...secret, id: undefined, isPRMRSecret: true } });
 			}
 		}
 	}
 }
+async function reEncryptSecrets() {
+	const { execaCommand } = await import('execa');
+	const date = new Date().getTime();
+	await execaCommand('env | grep COOLIFY > .env', { shell: true });
+	const secretOld = process.env['COOLIFY_SECRET_KEY'];
+	let secretNew = process.env['COOLIFY_SECRET_KEY_BETTER'];
+	if (!secretNew) {
+		console.log('No COOLIFY_SECRET_KEY_BETTER found... Generating new one...');
+		const { stdout: newKey } = await execaCommand(
+			'openssl rand -base64 1024 | sha256sum | base64 | head -c 32',
+			{ shell: true }
+		);
+		secretNew = newKey;
+	}
+	if (secretOld !== secretNew) {
+		console.log(
+			'Secrets (COOLIFY_SECRET_KEY & COOLIFY_SECRET_KEY_BETTER) are different, so re-encrypting everything...'
+ ); + await execaCommand(`sed -i '/COOLIFY_SECRET_KEY=/d' .env`, { shell: true }); + await execaCommand(`sed -i '/COOLIFY_SECRET_KEY_BETTER=/d' .env`, { shell: true }); + await execaCommand(`echo "COOLIFY_SECRET_KEY=${secretNew}" >> .env`, { shell: true }); + await execaCommand('echo "COOLIFY_SECRET_KEY_BETTER=' + secretNew + '" >> .env ', { + shell: true + }); + await execaCommand(`echo "COOLIFY_SECRET_KEY_OLD_${date}=${secretOld}" >> .env`, { + shell: true + }); + console.log(`Backup database to /app/db/prod.db_${date}.`); + await execaCommand(`cp /app/db/prod.db /app/db/prod.db_${date}`, { shell: true }); + const transactions = []; + const secrets = await prisma.secret.findMany(); + if (secrets.length > 0) { + for (const secret of secrets) { + const value = decrypt(secret.value, secretOld); + const newValue = encrypt(value, secretNew); + transactions.push( + prisma.secret.update({ + where: { id: secret.id }, + data: { value: newValue } + }) + ); + } + } + const serviceSecrets = await prisma.serviceSecret.findMany(); + if (serviceSecrets.length > 0) { + for (const secret of serviceSecrets) { + const value = decrypt(secret.value, secretOld); + const newValue = encrypt(value, secretNew); + transactions.push( + prisma.serviceSecret.update({ + where: { id: secret.id }, + data: { value: newValue } + }) + ); + } + } + const gitlabApps = await prisma.gitlabApp.findMany(); + if (gitlabApps.length > 0) { + for (const gitlabApp of gitlabApps) { + const value = decrypt(gitlabApp.privateSshKey, secretOld); + const newValue = encrypt(value, secretNew); + const appSecret = decrypt(gitlabApp.appSecret, secretOld); + const newAppSecret = encrypt(appSecret, secretNew); + transactions.push( + prisma.gitlabApp.update({ + where: { id: gitlabApp.id }, + data: { privateSshKey: newValue, appSecret: newAppSecret } + }) + ); + } + } + const githubApps = await prisma.githubApp.findMany(); + if (githubApps.length > 0) { + for (const githubApp of githubApps) { + const clientSecret = decrypt(githubApp.clientSecret, secretOld); + const newClientSecret = encrypt(clientSecret, secretNew); + const webhookSecret = decrypt(githubApp.webhookSecret, secretOld); + const newWebhookSecret = encrypt(webhookSecret, secretNew); + const privateKey = decrypt(githubApp.privateKey, secretOld); + const newPrivateKey = encrypt(privateKey, secretNew); + + transactions.push( + prisma.githubApp.update({ + where: { id: githubApp.id }, + data: { + clientSecret: newClientSecret, + webhookSecret: newWebhookSecret, + privateKey: newPrivateKey + } + }) + ); + } + } + const databases = await prisma.database.findMany(); + if (databases.length > 0) { + for (const database of databases) { + const dbUserPassword = decrypt(database.dbUserPassword, secretOld); + const newDbUserPassword = encrypt(dbUserPassword, secretNew); + const rootUserPassword = decrypt(database.rootUserPassword, secretOld); + const newRootUserPassword = encrypt(rootUserPassword, secretNew); + transactions.push( + prisma.database.update({ + where: { id: database.id }, + data: { + dbUserPassword: newDbUserPassword, + rootUserPassword: newRootUserPassword + } + }) + ); + } + } + const databaseSecrets = await prisma.databaseSecret.findMany(); + if (databaseSecrets.length > 0) { + for (const databaseSecret of databaseSecrets) { + const value = decrypt(databaseSecret.value, secretOld); + const newValue = encrypt(value, secretNew); + transactions.push( + prisma.databaseSecret.update({ + where: { id: databaseSecret.id }, + data: { value: newValue } + }) + ); + } + } + const wordpresses 
= await prisma.wordpress.findMany(); + if (wordpresses.length > 0) { + for (const wordpress of wordpresses) { + const value = decrypt(wordpress.ftpHostKey, secretOld); + const newValue = encrypt(value, secretNew); + const ftpHostKeyPrivate = decrypt(wordpress.ftpHostKeyPrivate, secretOld); + const newFtpHostKeyPrivate = encrypt(ftpHostKeyPrivate, secretNew); + let newFtpPassword = undefined; + if (wordpress.ftpPassword != null) { + const ftpPassword = decrypt(wordpress.ftpPassword, secretOld); + newFtpPassword = encrypt(ftpPassword, secretNew); + } + + transactions.push( + prisma.wordpress.update({ + where: { id: wordpress.id }, + data: { + ftpHostKey: newValue, + ftpHostKeyPrivate: newFtpHostKeyPrivate, + ftpPassword: newFtpPassword + } + }) + ); + } + } + const sshKeys = await prisma.sshKey.findMany(); + if (sshKeys.length > 0) { + for (const key of sshKeys) { + const value = decrypt(key.privateKey, secretOld); + const newValue = encrypt(value, secretNew); + transactions.push( + prisma.sshKey.update({ + where: { id: key.id }, + data: { + privateKey: newValue + } + }) + ); + } + } + const dockerRegistries = await prisma.dockerRegistry.findMany(); + if (dockerRegistries.length > 0) { + for (const registry of dockerRegistries) { + const value = decrypt(registry.password, secretOld); + const newValue = encrypt(value, secretNew); + transactions.push( + prisma.dockerRegistry.update({ + where: { id: registry.id }, + data: { + password: newValue + } + }) + ); + } + } + const certificates = await prisma.certificate.findMany(); + if (certificates.length > 0) { + for (const certificate of certificates) { + const value = decrypt(certificate.key, secretOld); + const newValue = encrypt(value, secretNew); + transactions.push( + prisma.certificate.update({ + where: { id: certificate.id }, + data: { + key: newValue + } + }) + ); + } + } + await prisma.$transaction(transactions); + } else { + console.log('secrets are the same, so no need to re-encrypt'); + } +} + +const encrypt = (text, secret) => { + if (text && secret) { + const iv = crypto.randomBytes(16); + const cipher = crypto.createCipheriv(algorithm, secret, iv); + const encrypted = Buffer.concat([cipher.update(text.trim()), cipher.final()]); + return JSON.stringify({ + iv: iv.toString('hex'), + content: encrypted.toString('hex') + }); + } +}; +const decrypt = (hashString, secret) => { + if (hashString && secret) { + try { + const hash = JSON.parse(hashString); + const decipher = crypto.createDecipheriv(algorithm, secret, Buffer.from(hash.iv, 'hex')); + const decrpyted = Buffer.concat([ + decipher.update(Buffer.from(hash.content, 'hex')), + decipher.final() + ]); + return decrpyted.toString(); + } catch (error) { + console.log({ decryptionError: error.message }); + return hashString; + } + } +}; + main() .catch((e) => { console.error(e); @@ -99,15 +329,11 @@ main() .finally(async () => { await prisma.$disconnect(); }); - -const encrypt = (text) => { - if (text) { - const iv = crypto.randomBytes(16); - const cipher = crypto.createCipheriv(algorithm, process.env['COOLIFY_SECRET_KEY'], iv); - const encrypted = Buffer.concat([cipher.update(text), cipher.final()]); - return JSON.stringify({ - iv: iv.toString('hex'), - content: encrypted.toString('hex') - }); - } -}; \ No newline at end of file +reEncryptSecrets() + .catch((e) => { + console.error(e); + process.exit(1); + }) + .finally(async () => { + await prisma.$disconnect(); + }); diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index 18d2e6fc4..3756672e1 100644 --- a/apps/api/src/index.ts 
+++ b/apps/api/src/index.ts @@ -1,14 +1,19 @@ -import Fastify from 'fastify'; -import cors from '@fastify/cors'; -import serve from '@fastify/static'; -import env from '@fastify/env'; -import cookie from '@fastify/cookie'; -import multipart from '@fastify/multipart'; -import path, { join } from 'path'; import autoLoad from '@fastify/autoload'; +import cookie from '@fastify/cookie'; +import cors from '@fastify/cors'; +import env from '@fastify/env'; +import multipart from '@fastify/multipart'; +import serve from '@fastify/static'; +import Fastify from 'fastify'; import socketIO from 'fastify-socket.io'; +import path, { join } from 'path'; import socketIOServer from './realtime'; +import Graceful from '@ladjs/graceful'; +import { compareVersions } from 'compare-versions'; +import fs from 'fs/promises'; +import yaml from 'js-yaml'; +import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib'; import { cleanupDockerStorage, createRemoteEngineConfiguration, @@ -18,26 +23,20 @@ import { isDev, listSettings, prisma, - sentryDSN, startTraefikProxy, startTraefikTCPProxy, version } from './lib/common'; -import { scheduler } from './lib/scheduler'; -import { compareVersions } from 'compare-versions'; -import Graceful from '@ladjs/graceful'; -import yaml from 'js-yaml'; -import fs from 'fs/promises'; -import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers'; import { checkContainer } from './lib/docker'; -import { migrateApplicationPersistentStorage, migrateServicesToNewTemplate } from './lib'; +import { scheduler } from './lib/scheduler'; +import { verifyRemoteDockerEngineFn } from './routes/api/v1/destinations/handlers'; import { refreshTags, refreshTemplates } from './routes/api/v1/handlers'; -import * as Sentry from '@sentry/node'; declare module 'fastify' { interface FastifyInstance { config: { COOLIFY_APP_ID: string; COOLIFY_SECRET_KEY: string; + COOLIFY_SECRET_KEY_BETTER: string | null; COOLIFY_DATABASE_URL: string; COOLIFY_IS_ON: string; COOLIFY_WHITE_LABELED: string; @@ -67,6 +66,10 @@ const host = '0.0.0.0'; COOLIFY_SECRET_KEY: { type: 'string' }, + COOLIFY_SECRET_KEY_BETTER: { + type: 'string', + default: null + }, COOLIFY_DATABASE_URL: { type: 'string', default: 'file:../db/dev.db' @@ -185,17 +188,17 @@ const host = '0.0.0.0'; // Refresh and check templates setInterval(async () => { await refreshTemplates(); - }, 60000); + }, 60000 * 10); setInterval(async () => { await refreshTags(); - }, 60000); + }, 60000 * 10); setInterval( async () => { await migrateServicesToNewTemplate(); }, - isDev ? 10000 : 60000 + isDev ? 
10000 : 60000 * 10 ); setInterval(async () => { @@ -230,7 +233,7 @@ async function getIPAddress() { console.log(`Getting public IPv6 address...`); await prisma.setting.update({ where: { id: settings.id }, data: { ipv6 } }); } - } catch (error) {} + } catch (error) { } } async function getTagsTemplates() { const { default: got } = await import('got'); @@ -242,7 +245,7 @@ async function getTagsTemplates() { if (await fs.stat('./testTemplate.yaml')) { templates = templates + (await fs.readFile('./testTemplate.yaml', 'utf8')); } - } catch (error) {} + } catch (error) { } try { if (await fs.stat('./testTags.json')) { const testTags = await fs.readFile('./testTags.json', 'utf8'); @@ -250,7 +253,7 @@ async function getTagsTemplates() { tags = JSON.stringify(JSON.parse(tags).concat(JSON.parse(testTags))); } } - } catch (error) {} + } catch (error) { } await fs.writeFile('./templates.json', JSON.stringify(yaml.load(templates))); await fs.writeFile('./tags.json', tags); @@ -276,9 +279,6 @@ async function initServer() { if (settings.doNotTrack === true) { console.log('[000] Telemetry disabled...'); } else { - if (settings.sentryDSN !== sentryDSN) { - await prisma.setting.update({ where: { id: '0' }, data: { sentryDSN } }); - } // Initialize Sentry // Sentry.init({ // dsn: sentryDSN, @@ -293,7 +293,7 @@ async function initServer() { try { console.log(`[001] Initializing server...`); await executeCommand({ command: `docker network create --attachable coolify` }); - } catch (error) {} + } catch (error) { } try { console.log(`[002] Cleanup stucked builds...`); const isOlder = compareVersions('3.8.1', version); @@ -303,7 +303,7 @@ async function initServer() { data: { status: 'failed' } }); } - } catch (error) {} + } catch (error) { } try { console.log('[003] Cleaning up old build sources under /tmp/build-sources/...'); if (!isDev) await fs.rm('/tmp/build-sources', { recursive: true, force: true }); @@ -319,7 +319,7 @@ async function getArch() { console.log(`Getting architecture...`); await prisma.setting.update({ where: { id: settings.id }, data: { arch: process.arch } }); } - } catch (error) {} + } catch (error) { } } async function cleanupStuckedContainers() { @@ -402,7 +402,9 @@ async function autoUpdater() { if (!isDev) { const { isAutoUpdateEnabled } = await prisma.setting.findFirst(); if (isAutoUpdateEnabled) { - await executeCommand({ command: `docker pull ghcr.io/coollabsio/coolify:${latestVersion}` }); + await executeCommand({ + command: `docker pull ghcr.io/coollabsio/coolify:${latestVersion}` + }); await executeCommand({ shell: true, command: `env | grep '^COOLIFY' > .env` }); await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` @@ -475,7 +477,7 @@ async function checkProxies() { } try { await createRemoteEngineConfiguration(docker.id); - } catch (error) {} + } catch (error) { } } } // TCP Proxies @@ -514,7 +516,7 @@ async function checkProxies() { // await startTraefikTCPProxy(destinationDocker, id, publicPort, 9000); // } // } - } catch (error) {} + } catch (error) { } } async function copySSLCertificates() { @@ -546,7 +548,11 @@ async function copySSLCertificates() { } catch (error) { console.log(error); } finally { - await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` }); + try { + await executeCommand({ command: `find /tmp/ -maxdepth 1 -type f -name '*-*.pem' -delete` }); + } catch (e) { + console.log(e); + } } } @@ -604,53 +610,54 @@ async function cleanupStorage() { if 
(!destination.remoteVerified) continue; enginesDone.add(destination.remoteIpAddress); } - let lowDiskSpace = false; - try { - let stdout = null; - if (!isDev) { - const output = await executeCommand({ - dockerId: destination.id, - command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`, - shell: true - }); - stdout = output.stdout; - } else { - const output = await executeCommand({ - command: `df -kPT /` - }); - stdout = output.stdout; - } - let lines = stdout.trim().split('\n'); - let header = lines[0]; - let regex = - /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g; - const boundaries = []; - let match; + await cleanupDockerStorage(destination.id); + // let lowDiskSpace = false; + // try { + // let stdout = null; + // if (!isDev) { + // const output = await executeCommand({ + // dockerId: destination.id, + // command: `CONTAINER=$(docker ps -lq | head -1) && docker exec $CONTAINER sh -c 'df -kPT /'`, + // shell: true + // }); + // stdout = output.stdout; + // } else { + // const output = await executeCommand({ + // command: `df -kPT /` + // }); + // stdout = output.stdout; + // } + // let lines = stdout.trim().split('\n'); + // let header = lines[0]; + // let regex = + // /^Filesystem\s+|Type\s+|1024-blocks|\s+Used|\s+Available|\s+Capacity|\s+Mounted on\s*$/g; + // const boundaries = []; + // let match; - while ((match = regex.exec(header))) { - boundaries.push(match[0].length); - } + // while ((match = regex.exec(header))) { + // boundaries.push(match[0].length); + // } - boundaries[boundaries.length - 1] = -1; - const data = lines.slice(1).map((line) => { - const cl = boundaries.map((boundary) => { - const column = boundary > 0 ? line.slice(0, boundary) : line; - line = line.slice(boundary); - return column.trim(); - }); - return { - capacity: Number.parseInt(cl[5], 10) / 100 - }; - }); - if (data.length > 0) { - const { capacity } = data[0]; - if (capacity > 0.8) { - lowDiskSpace = true; - } - } - } catch (error) {} - if (lowDiskSpace) { - await cleanupDockerStorage(destination.id); - } + // boundaries[boundaries.length - 1] = -1; + // const data = lines.slice(1).map((line) => { + // const cl = boundaries.map((boundary) => { + // const column = boundary > 0 ? line.slice(0, boundary) : line; + // line = line.slice(boundary); + // return column.trim(); + // }); + // return { + // capacity: Number.parseInt(cl[5], 10) / 100 + // }; + // }); + // if (data.length > 0) { + // const { capacity } = data[0]; + // if (capacity > 0.8) { + // lowDiskSpace = true; + // } + // } + // } catch (error) {} + // if (lowDiskSpace) { + // await cleanupDockerStorage(destination.id); + // } } } diff --git a/apps/api/src/lib/buildPacks/compose.ts b/apps/api/src/lib/buildPacks/compose.ts index 136ea9814..9f5288463 100644 --- a/apps/api/src/lib/buildPacks/compose.ts +++ b/apps/api/src/lib/buildPacks/compose.ts @@ -52,7 +52,6 @@ export default async function (data) { } let environment = typeof value['environment'] === 'undefined' ? 
[] : value['environment']; - console.log({ key, environment }); if (Object.keys(environment).length > 0) { environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`); } diff --git a/apps/api/src/lib/common.ts b/apps/api/src/lib/common.ts index dd17f1c05..453036788 100644 --- a/apps/api/src/lib/common.ts +++ b/apps/api/src/lib/common.ts @@ -1,6 +1,5 @@ -import { exec } from 'node:child_process'; -import util from 'util'; import fs from 'fs/promises'; +import fsNormal from 'fs'; import yaml from 'js-yaml'; import forge from 'node-forge'; import { uniqueNamesGenerator, adjectives, colors, animals } from 'unique-names-generator'; @@ -8,7 +7,6 @@ import type { Config } from 'unique-names-generator'; import generator from 'generate-password'; import crypto from 'crypto'; import { promises as dns } from 'dns'; -import * as Sentry from '@sentry/node'; import { PrismaClient } from '@prisma/client'; import os from 'os'; import * as SSHConfig from 'ssh-config/src/ssh-config'; @@ -18,13 +16,13 @@ import { day } from './dayjs'; import { saveBuildLog } from './buildPacks/common'; import { scheduler } from './scheduler'; import type { ExecaChildProcess } from 'execa'; +import { FastifyReply } from 'fastify'; -export const version = '3.12.31'; +export const version = '3.12.34'; export const isDev = process.env.NODE_ENV === 'development'; export const proxyPort = process.env.COOLIFY_PROXY_PORT; export const proxySecurePort = process.env.COOLIFY_PROXY_SECURE_PORT; -export const sentryDSN = - 'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216'; + const algorithm = 'aes-256-ctr'; const customConfig: Config = { dictionaries: [adjectives, colors, animals], @@ -172,13 +170,19 @@ export const base64Encode = (text: string): string => { export const base64Decode = (text: string): string => { return Buffer.from(text, 'base64').toString('ascii'); }; +export const getSecretKey = () => { + if (process.env['COOLIFY_SECRET_KEY_BETTER']) { + return process.env['COOLIFY_SECRET_KEY_BETTER']; + } + return process.env['COOLIFY_SECRET_KEY']; +}; export const decrypt = (hashString: string) => { if (hashString) { try { const hash = JSON.parse(hashString); const decipher = crypto.createDecipheriv( algorithm, - process.env['COOLIFY_SECRET_KEY'], + getSecretKey(), Buffer.from(hash.iv, 'hex') ); const decrpyted = Buffer.concat([ @@ -195,7 +199,7 @@ export const decrypt = (hashString: string) => { export const encrypt = (text: string) => { if (text) { const iv = crypto.randomBytes(16); - const cipher = crypto.createCipheriv(algorithm, process.env['COOLIFY_SECRET_KEY'], iv); + const cipher = crypto.createCipheriv(algorithm, getSecretKey(), iv); const encrypted = Buffer.concat([cipher.update(text.trim()), cipher.final()]); return JSON.stringify({ iv: iv.toString('hex'), @@ -579,7 +583,8 @@ export async function executeCommand({ stream = false, buildId, applicationId, - debug + debug, + timeout = 0 }: { command: string; sshCommand?: boolean; @@ -589,6 +594,7 @@ export async function executeCommand({ buildId?: string; applicationId?: string; debug?: boolean; + timeout?: number; }): Promise> { const { execa, execaCommand } = await import('execa'); const { parse } = await import('shell-quote'); @@ -613,20 +619,26 @@ export async function executeCommand({ } if (sshCommand) { if (shell) { - return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`); + return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`, { + timeout + }); } - return await execa('ssh', 
[`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs]); + return await execa('ssh', [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs], { + timeout + }); } if (stream) { return await new Promise(async (resolve, reject) => { let subprocess = null; if (shell) { subprocess = execaCommand(command, { - env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine } + env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }, + timeout }); } else { subprocess = execa(dockerCommand, dockerArgs, { - env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine } + env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }, + timeout }); } const logs = []; @@ -680,19 +692,26 @@ export async function executeCommand({ } else { if (shell) { return await execaCommand(command, { - env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine } + env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }, + timeout }); } else { return await execa(dockerCommand, dockerArgs, { - env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine } + env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }, + timeout }); } } } else { if (shell) { - return execaCommand(command, { shell: true }); + return execaCommand(command, { + shell: true, + timeout + }); } - return await execa(dockerCommand, dockerArgs); + return await execa(dockerCommand, dockerArgs, { + timeout + }); } } @@ -826,7 +845,7 @@ export function generateToken() { { nbf: Math.floor(Date.now() / 1000) - 30 }, - process.env['COOLIFY_SECRET_KEY'] + getSecretKey() ); } export function generatePassword({ @@ -849,97 +868,97 @@ export function generatePassword({ type DatabaseConfiguration = | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - MYSQL_DATABASE: string; - MYSQL_PASSWORD: string; - MYSQL_ROOT_USER: string; - MYSQL_USER: string; - MYSQL_ROOT_PASSWORD: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + MYSQL_DATABASE: string; + MYSQL_PASSWORD: string; + MYSQL_ROOT_USER: string; + MYSQL_USER: string; + MYSQL_ROOT_PASSWORD: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - MONGO_INITDB_ROOT_USERNAME?: string; - MONGO_INITDB_ROOT_PASSWORD?: string; - MONGODB_ROOT_USER?: string; - MONGODB_ROOT_PASSWORD?: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + MONGO_INITDB_ROOT_USERNAME?: string; + MONGO_INITDB_ROOT_PASSWORD?: string; + MONGODB_ROOT_USER?: string; + MONGODB_ROOT_PASSWORD?: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - MARIADB_ROOT_USER: string; - MARIADB_ROOT_PASSWORD: string; - MARIADB_USER: string; - MARIADB_PASSWORD: string; - MARIADB_DATABASE: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + MARIADB_ROOT_USER: string; + MARIADB_ROOT_PASSWORD: string; + MARIADB_USER: string; + MARIADB_PASSWORD: string; + MARIADB_DATABASE: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - POSTGRES_PASSWORD?: string; - POSTGRES_USER?: string; - POSTGRES_DB?: string; - POSTGRESQL_POSTGRES_PASSWORD?: string; - POSTGRESQL_USERNAME?: string; - POSTGRESQL_PASSWORD?: string; - 
POSTGRESQL_DATABASE?: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + POSTGRES_PASSWORD?: string; + POSTGRES_USER?: string; + POSTGRES_DB?: string; + POSTGRESQL_POSTGRES_PASSWORD?: string; + POSTGRESQL_USERNAME?: string; + POSTGRESQL_PASSWORD?: string; + POSTGRESQL_DATABASE?: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - REDIS_AOF_ENABLED: string; - REDIS_PASSWORD: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + REDIS_AOF_ENABLED: string; + REDIS_PASSWORD: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - COUCHDB_PASSWORD: string; - COUCHDB_USER: string; - }; - } + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + COUCHDB_PASSWORD: string; + COUCHDB_USER: string; + }; + } | { - volume: string; - image: string; - command?: string; - ulimits: Record; - privatePort: number; - environmentVariables: { - EDGEDB_SERVER_PASSWORD: string; - EDGEDB_SERVER_USER: string; - EDGEDB_SERVER_DATABASE: string; - EDGEDB_SERVER_TLS_CERT_MODE: string; - }; - }; + volume: string; + image: string; + command?: string; + ulimits: Record; + privatePort: number; + environmentVariables: { + EDGEDB_SERVER_PASSWORD: string; + EDGEDB_SERVER_USER: string; + EDGEDB_SERVER_DATABASE: string; + EDGEDB_SERVER_TLS_CERT_MODE: string; + }; + }; export function generateDatabaseConfiguration(database: any): DatabaseConfiguration { const { id, dbUser, dbUserPassword, rootUser, rootUserPassword, defaultDatabase, version, type } = database; @@ -1038,9 +1057,8 @@ export function generateDatabaseConfiguration(database: any): DatabaseConfigurat }; if (isARM()) { configuration.volume = `${id}-${type}-data:/data`; - configuration.command = `/usr/local/bin/redis-server --appendonly ${ - appendOnly ? 'yes' : 'no' - } --requirepass ${dbUserPassword}`; + configuration.command = `/usr/local/bin/redis-server --appendonly ${appendOnly ? 'yes' : 'no' + } --requirepass ${dbUserPassword}`; } return configuration; } else if (type === 'couchdb') { @@ -1125,12 +1143,12 @@ export type ComposeFileService = { command?: string; ports?: string[]; build?: - | { - context: string; - dockerfile: string; - args?: Record; - } - | string; + | { + context: string; + dockerfile: string; + args?: Record; + } + | string; deploy?: { restart_policy?: { condition?: string; @@ -1201,7 +1219,7 @@ export const createDirectories = async ({ let workdirFound = false; try { workdirFound = !!(await fs.stat(workdir)); - } catch (error) {} + } catch (error) { } if (workdirFound) { await executeCommand({ command: `rm -fr ${workdir}` }); } @@ -1664,9 +1682,6 @@ export function errorHandler({ if (message.includes('Unique constraint failed')) { message = 'This data is unique and already exists. 
Please try again with a different value.'; } - if (type === 'normal') { - Sentry.captureException(message); - } throw { status, message }; } export async function generateSshKeyPair(): Promise<{ publicKey: string; privateKey: string }> { @@ -1728,7 +1743,7 @@ export async function stopBuild(buildId, applicationId) { } } count++; - } catch (error) {} + } catch (error) { } }, 100); }); } @@ -1751,7 +1766,7 @@ export async function cleanupDockerStorage(dockerId) { // Cleanup images that are not used by any container try { await executeCommand({ dockerId, command: `docker image prune -af` }); - } catch (error) {} + } catch (error) { } // Prune coolify managed containers try { @@ -1759,12 +1774,12 @@ export async function cleanupDockerStorage(dockerId) { dockerId, command: `docker container prune -f --filter "label=coolify.managed=true"` }); - } catch (error) {} + } catch (error) { } // Cleanup build caches try { await executeCommand({ dockerId, command: `docker builder prune -af` }); - } catch (error) {} + } catch (error) { } } export function persistentVolumes(id, persistentStorage, config) { @@ -1927,3 +1942,51 @@ export function generateSecrets( } return envs; } + +export async function backupPostgresqlDatabase(database, reply) { + const backupFolder = '/tmp' + const fileName = `${database.id}-${new Date().getTime()}.gz` + const backupFileName = `${backupFolder}/${fileName}` + console.log({ database }) + let command = null + switch (database?.type) { + case 'postgresql': + command = `docker exec ${database.id} sh -c "PGPASSWORD=${database.rootUserPassword} pg_dumpall -U postgres | gzip > ${backupFileName}"` + break; + case 'mongodb': + command = `docker exec ${database.id} sh -c "mongodump --archive=${backupFileName} --gzip --username=${database.rootUser} --password=${database.rootUserPassword}"` + break; + case 'mysql': + command = `docker exec ${database.id} sh -c "mysqldump --all-databases --single-transaction --quick --lock-tables=false --user=${database.rootUser} --password=${database.rootUserPassword} | gzip > ${backupFileName}"` + break; + case 'mariadb': + command = `docker exec ${database.id} sh -c "mysqldump --all-databases --single-transaction --quick --lock-tables=false --user=${database.rootUser} --password=${database.rootUserPassword} | gzip > ${backupFileName}"` + break; + case 'couchdb': + command = `docker exec ${database.id} sh -c "tar -czvf ${backupFileName} /bitnami/couchdb/data"` + break; + default: + return; + } + await executeCommand({ + dockerId: database.destinationDockerId, + command, + }); + const copyCommand = `docker cp ${database.id}:${backupFileName} ${backupFileName}` + await executeCommand({ + dockerId: database.destinationDockerId, + command: copyCommand + }); + if (isDev) { + await executeCommand({ + dockerId: database.destinationDockerId, + command: `docker cp ${database.id}:${backupFileName} /app/backups/` + }); + } + const stream = fsNormal.createReadStream(backupFileName); + reply.header('Content-Type', 'application/octet-stream'); + reply.header('Content-Disposition', `attachment; filename=${fileName}`); + reply.header('Content-Length', fsNormal.statSync(backupFileName).size); + reply.header('Content-Transfer-Encoding', 'binary'); + return reply.send(stream) +} diff --git a/apps/api/src/plugins/jwt.ts b/apps/api/src/plugins/jwt.ts index 029aecd94..1a50fca4f 100644 --- a/apps/api/src/plugins/jwt.ts +++ b/apps/api/src/plugins/jwt.ts @@ -1,33 +1,37 @@ -import fp from 'fastify-plugin' -import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt' +import 
fp from 'fastify-plugin'; +import fastifyJwt, { FastifyJWTOptions } from '@fastify/jwt'; -declare module "@fastify/jwt" { - interface FastifyJWT { - user: { - userId: string, - teamId: string, - permission: string, - isAdmin: boolean - } - } +declare module '@fastify/jwt' { + interface FastifyJWT { + user: { + userId: string; + teamId: string; + permission: string; + isAdmin: boolean; + }; + } } export default fp(async (fastify, opts) => { - fastify.register(fastifyJwt, { - secret: fastify.config.COOLIFY_SECRET_KEY - }) + let secretKey = fastify.config.COOLIFY_SECRET_KEY_BETTER; + if (!secretKey) { + secretKey = fastify.config.COOLIFY_SECRET_KEY; + } + fastify.register(fastifyJwt, { + secret: secretKey + }); - fastify.decorate("authenticate", async function (request, reply) { - try { - await request.jwtVerify() - } catch (err) { - reply.send(err) - } - }) -}) + fastify.decorate('authenticate', async function (request, reply) { + try { + await request.jwtVerify(); + } catch (err) { + reply.send(err); + } + }); +}); declare module 'fastify' { - export interface FastifyInstance { - authenticate(): Promise - } + export interface FastifyInstance { + authenticate(): Promise; + } } diff --git a/apps/api/src/routes/api/v1/applications/handlers.ts b/apps/api/src/routes/api/v1/applications/handlers.ts index 55b926aa3..7d2c6ee8c 100644 --- a/apps/api/src/routes/api/v1/applications/handlers.ts +++ b/apps/api/src/routes/api/v1/applications/handlers.ts @@ -646,8 +646,7 @@ export async function restartApplication( const volumes = persistentStorage?.map((storage) => { - return `${applicationId}${storage.path.replace(/\//gi, '-')}:${buildPack !== 'docker' ? '/app' : '' - }${storage.path}`; + return `${applicationId}${storage.path.replace(/\//gi, '-')}:${storage.path}`; }) || []; const composeVolumes = volumes.map((volume) => { return { diff --git a/apps/api/src/routes/api/v1/databases/handlers.ts b/apps/api/src/routes/api/v1/databases/handlers.ts index dc9a6e9b4..126e34a93 100644 --- a/apps/api/src/routes/api/v1/databases/handlers.ts +++ b/apps/api/src/routes/api/v1/databases/handlers.ts @@ -5,6 +5,7 @@ import yaml from 'js-yaml'; import fs from 'fs/promises'; import { ComposeFile, + backupPostgresqlDatabase, createDirectories, decrypt, defaultComposeConfiguration, @@ -351,6 +352,21 @@ export async function startDatabase(request: FastifyRequest) { return errorHandler({ status, message }); } } +export async function backupDatabase(request: FastifyRequest, reply: FastifyReply) { + try { + const teamId = request.user.teamId; + const { id } = request.params; + const database = await prisma.database.findFirst({ + where: { id, teams: { some: { id: teamId === '0' ? 
undefined : teamId } } }, + include: { destinationDocker: true, settings: true } + }); + if (database.dbUserPassword) database.dbUserPassword = decrypt(database.dbUserPassword); + if (database.rootUserPassword) database.rootUserPassword = decrypt(database.rootUserPassword); + return await backupPostgresqlDatabase(database, reply); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } +} export async function stopDatabase(request: FastifyRequest) { try { const teamId = request.user.teamId; diff --git a/apps/api/src/routes/api/v1/databases/index.ts b/apps/api/src/routes/api/v1/databases/index.ts index 65f0d58f4..8a225e67e 100644 --- a/apps/api/src/routes/api/v1/databases/index.ts +++ b/apps/api/src/routes/api/v1/databases/index.ts @@ -1,5 +1,5 @@ import { FastifyPluginAsync } from 'fastify'; -import { cleanupUnconfiguredDatabases, deleteDatabase, deleteDatabaseSecret, getDatabase, getDatabaseLogs, getDatabaseSecrets, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSecret, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers'; +import { backupDatabase, cleanupUnconfiguredDatabases, deleteDatabase, deleteDatabaseSecret, getDatabase, getDatabaseLogs, getDatabaseSecrets, getDatabaseStatus, getDatabaseTypes, getDatabaseUsage, getVersions, listDatabases, newDatabase, saveDatabase, saveDatabaseDestination, saveDatabaseSecret, saveDatabaseSettings, saveDatabaseType, saveVersion, startDatabase, stopDatabase } from './handlers'; import type { OnlyId } from '../../../../types'; @@ -39,6 +39,7 @@ const root: FastifyPluginAsync = async (fastify): Promise => { fastify.post('/:id/start', async (request) => await startDatabase(request)); fastify.post('/:id/stop', async (request) => await stopDatabase(request)); + fastify.post('/:id/backup', async (request, reply) => await backupDatabase(request, reply)); }; export default root; diff --git a/apps/api/src/routes/api/v1/destinations/handlers.ts b/apps/api/src/routes/api/v1/destinations/handlers.ts index e64ac211d..e72b46b01 100644 --- a/apps/api/src/routes/api/v1/destinations/handlers.ts +++ b/apps/api/src/routes/api/v1/destinations/handlers.ts @@ -18,6 +18,7 @@ import type { Proxy, SaveDestinationSettings } from './types'; +import { removeService } from '../../../../lib/services/common'; export async function listDestinations(request: FastifyRequest) { try { @@ -143,6 +144,35 @@ export async function newDestination(request: FastifyRequest, re return errorHandler({ status, message }); } } +export async function forceDeleteDestination(request: FastifyRequest) { + try { + const { id } = request.params; + const services = await prisma.service.findMany({ where: { destinationDockerId: id } }); + for (const service of services) { + await removeService({ id: service.id }); + } + const applications = await prisma.application.findMany({ where: { destinationDockerId: id } }); + for (const application of applications) { + await prisma.applicationSettings.deleteMany({ where: { application: { id: application.id } } }); + await prisma.buildLog.deleteMany({ where: { applicationId: application.id } }); + await prisma.build.deleteMany({ where: { applicationId: application.id } }); + await prisma.secret.deleteMany({ where: { applicationId: application.id } }); + await prisma.applicationPersistentStorage.deleteMany({ where: { applicationId: application.id } }); + await 
prisma.applicationConnectedDatabase.deleteMany({ where: { applicationId: application.id } }); + await prisma.previewApplication.deleteMany({ where: { applicationId: application.id } }); + } + const databases = await prisma.database.findMany({ where: { destinationDockerId: id } }); + for (const database of databases) { + await prisma.databaseSettings.deleteMany({ where: { databaseId: database.id } }); + await prisma.databaseSecret.deleteMany({ where: { databaseId: database.id } }); + await prisma.database.delete({ where: { id: database.id } }); + } + await prisma.destinationDocker.delete({ where: { id } }); + return {}; + } catch ({ status, message }) { + return errorHandler({ status, message }); + } +} export async function deleteDestination(request: FastifyRequest) { try { const { id } = request.params; @@ -318,6 +348,7 @@ export async function verifyRemoteDockerEngineFn(id: string) { } await prisma.destinationDocker.update({ where: { id }, data: { remoteVerified: true } }); } catch (error) { + console.log(error) throw new Error('Error while verifying remote docker engine'); } } diff --git a/apps/api/src/routes/api/v1/destinations/index.ts b/apps/api/src/routes/api/v1/destinations/index.ts index 774afa285..704c2aa79 100644 --- a/apps/api/src/routes/api/v1/destinations/index.ts +++ b/apps/api/src/routes/api/v1/destinations/index.ts @@ -1,5 +1,5 @@ import { FastifyPluginAsync } from 'fastify'; -import { assignSSHKey, checkDestination, deleteDestination, getDestination, getDestinationStatus, listDestinations, newDestination, restartProxy, saveDestinationSettings, startProxy, stopProxy, verifyRemoteDockerEngine } from './handlers'; +import { assignSSHKey, checkDestination, deleteDestination, forceDeleteDestination, getDestination, getDestinationStatus, listDestinations, newDestination, restartProxy, saveDestinationSettings, startProxy, stopProxy, verifyRemoteDockerEngine } from './handlers'; import type { OnlyId } from '../../../../types'; import type { CheckDestination, ListDestinations, NewDestination, Proxy, SaveDestinationSettings } from './types'; @@ -14,6 +14,7 @@ const root: FastifyPluginAsync = async (fastify): Promise => { fastify.get('/:id', async (request) => await getDestination(request)); fastify.post('/:id', async (request, reply) => await newDestination(request, reply)); fastify.delete('/:id', async (request) => await deleteDestination(request)); + fastify.delete('/:id/force', async (request) => await forceDeleteDestination(request)); fastify.get('/:id/status', async (request) => await getDestinationStatus(request)); fastify.post('/:id/settings', async (request) => await saveDestinationSettings(request)); diff --git a/apps/api/src/routes/api/v1/handlers.ts b/apps/api/src/routes/api/v1/handlers.ts index 5cb283137..a9525b74f 100644 --- a/apps/api/src/routes/api/v1/handlers.ts +++ b/apps/api/src/routes/api/v1/handlers.ts @@ -12,7 +12,6 @@ import { prisma, uniqueName, version, - sentryDSN, executeCommand } from '../../../lib/common'; import { scheduler } from '../../../lib/scheduler'; @@ -164,7 +163,7 @@ export async function update(request: FastifyRequest) { await executeCommand({ command: `docker pull ${image}` }); } - await executeCommand({ shell: true, command: `env | grep COOLIFY > .env` }); + await executeCommand({ shell: true, command: `ls .env || env | grep COOLIFY > .env` }); await executeCommand({ command: `sed -i '/COOLIFY_AUTO_UPDATE=/cCOOLIFY_AUTO_UPDATE=${isAutoUpdateEnabled}' .env` }); @@ -452,7 +451,6 @@ export async function getCurrentUser(request: FastifyRequest, 
fa }); return { settings: await prisma.setting.findUnique({ where: { id: '0' } }), - sentryDSN, pendingInvitations, token, ...request.user diff --git a/apps/api/src/routes/api/v1/settings/handlers.ts b/apps/api/src/routes/api/v1/settings/handlers.ts index 1f0ecb006..929bef1b8 100644 --- a/apps/api/src/routes/api/v1/settings/handlers.ts +++ b/apps/api/src/routes/api/v1/settings/handlers.ts @@ -1,235 +1,312 @@ import { promises as dns } from 'dns'; import { X509Certificate } from 'node:crypto'; -import * as Sentry from '@sentry/node'; import type { FastifyReply, FastifyRequest } from 'fastify'; -import { checkDomainsIsValidInDNS, decrypt, encrypt, errorHandler, executeCommand, getDomain, isDev, isDNSValid, isDomainConfigured, listSettings, prisma, sentryDSN, version } from '../../../../lib/common'; -import { AddDefaultRegistry, CheckDNS, CheckDomain, DeleteDomain, OnlyIdInBody, SaveSettings, SaveSSHKey, SetDefaultRegistry } from './types'; - +import { + checkDomainsIsValidInDNS, + decrypt, + encrypt, + errorHandler, + executeCommand, + getDomain, + isDev, + isDNSValid, + isDomainConfigured, + listSettings, + prisma +} from '../../../../lib/common'; +import { + AddDefaultRegistry, + CheckDNS, + CheckDomain, + DeleteDomain, + OnlyIdInBody, + SaveSettings, + SaveSSHKey, + SetDefaultRegistry +} from './types'; export async function listAllSettings(request: FastifyRequest) { - try { - const teamId = request.user.teamId; - const settings = await listSettings(); - const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } }) - let registries = await prisma.dockerRegistry.findMany({ where: { team: { id: teamId } } }) - registries = registries.map((registry) => { - if (registry.password) { - registry.password = decrypt(registry.password) - } - return registry - }) - const unencryptedKeys = [] - if (sshKeys.length > 0) { - for (const key of sshKeys) { - unencryptedKeys.push({ id: key.id, name: key.name, privateKey: decrypt(key.privateKey), createdAt: key.createdAt }) - } - } - const certificates = await prisma.certificate.findMany({ where: { team: { id: teamId } } }) - let cns = []; - for (const certificate of certificates) { - const x509 = new X509Certificate(certificate.cert); - cns.push({ commonName: x509.subject.split('\n').find((s) => s.startsWith('CN=')).replace('CN=', ''), id: certificate.id, createdAt: certificate.createdAt }) - } + try { + const teamId = request.user.teamId; + const settings = await listSettings(); + const sshKeys = await prisma.sshKey.findMany({ where: { team: { id: teamId } } }); + let registries = await prisma.dockerRegistry.findMany({ where: { team: { id: teamId } } }); + registries = registries.map((registry) => { + if (registry.password) { + registry.password = decrypt(registry.password); + } + return registry; + }); + const unencryptedKeys = []; + if (sshKeys.length > 0) { + for (const key of sshKeys) { + unencryptedKeys.push({ + id: key.id, + name: key.name, + privateKey: decrypt(key.privateKey), + createdAt: key.createdAt + }); + } + } + const certificates = await prisma.certificate.findMany({ where: { team: { id: teamId } } }); + let cns = []; + for (const certificate of certificates) { + const x509 = new X509Certificate(certificate.cert); + cns.push({ + commonName: x509.subject + .split('\n') + .find((s) => s.startsWith('CN=')) + .replace('CN=', ''), + id: certificate.id, + createdAt: certificate.createdAt + }); + } - return { - settings, - certificates: cns, - sshKeys: unencryptedKeys, - registries - } - } catch ({ status, message }) { - return 
errorHandler({ status, message }) - } + return { + settings, + certificates: cns, + sshKeys: unencryptedKeys, + registries + }; + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } export async function saveSettings(request: FastifyRequest, reply: FastifyReply) { - try { - let { - previewSeparator, - numberOfDockerImagesKeptLocally, - doNotTrack, - fqdn, - isAPIDebuggingEnabled, - isRegistrationEnabled, - dualCerts, - minPort, - maxPort, - isAutoUpdateEnabled, - isDNSCheckEnabled, - DNSServers, - proxyDefaultRedirect - } = request.body - const { id, previewSeparator: SetPreviewSeparator } = await listSettings(); - if (numberOfDockerImagesKeptLocally) { - numberOfDockerImagesKeptLocally = Number(numberOfDockerImagesKeptLocally) - } - if (previewSeparator == '') { - previewSeparator = '.' - } - if (SetPreviewSeparator != previewSeparator) { - const applications = await prisma.application.findMany({ where: { previewApplication: { some: { id: { not: undefined } } } }, include: { previewApplication: true } }) - for (const application of applications) { - for (const preview of application.previewApplication) { - const { protocol } = new URL(preview.customDomain) - const { pullmergeRequestId } = preview - const { fqdn } = application - const newPreviewDomain = `${protocol}//${pullmergeRequestId}${previewSeparator}${getDomain(fqdn)}` - await prisma.previewApplication.update({ where: { id: preview.id }, data: { customDomain: newPreviewDomain } }) - } - } - } + try { + let { + previewSeparator, + numberOfDockerImagesKeptLocally, + doNotTrack, + fqdn, + isAPIDebuggingEnabled, + isRegistrationEnabled, + dualCerts, + minPort, + maxPort, + isAutoUpdateEnabled, + isDNSCheckEnabled, + DNSServers, + proxyDefaultRedirect + } = request.body; + const { id, previewSeparator: SetPreviewSeparator } = await listSettings(); + if (numberOfDockerImagesKeptLocally) { + numberOfDockerImagesKeptLocally = Number(numberOfDockerImagesKeptLocally); + } + if (previewSeparator == '') { + previewSeparator = '.'; + } + if (SetPreviewSeparator != previewSeparator) { + const applications = await prisma.application.findMany({ + where: { previewApplication: { some: { id: { not: undefined } } } }, + include: { previewApplication: true } + }); + for (const application of applications) { + for (const preview of application.previewApplication) { + const { protocol } = new URL(preview.customDomain); + const { pullmergeRequestId } = preview; + const { fqdn } = application; + const newPreviewDomain = `${protocol}//${pullmergeRequestId}${previewSeparator}${getDomain( + fqdn + )}`; + await prisma.previewApplication.update({ + where: { id: preview.id }, + data: { customDomain: newPreviewDomain } + }); + } + } + } - await prisma.setting.update({ - where: { id }, - data: { previewSeparator, numberOfDockerImagesKeptLocally, doNotTrack, isRegistrationEnabled, dualCerts, isAutoUpdateEnabled, isDNSCheckEnabled, DNSServers, isAPIDebuggingEnabled } - }); - if (fqdn) { - await prisma.setting.update({ where: { id }, data: { fqdn } }); - } - await prisma.setting.update({ where: { id }, data: { proxyDefaultRedirect } }); - if (minPort && maxPort) { - await prisma.setting.update({ where: { id }, data: { minPort, maxPort } }); - } - if (doNotTrack === false) { - // Sentry.init({ - // dsn: sentryDSN, - // environment: isDev ? 
'development' : 'production', - // release: version - // }); - // console.log('Sentry initialized') - } - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + await prisma.setting.update({ + where: { id }, + data: { + previewSeparator, + numberOfDockerImagesKeptLocally, + doNotTrack, + isRegistrationEnabled, + dualCerts, + isAutoUpdateEnabled, + isDNSCheckEnabled, + DNSServers, + isAPIDebuggingEnabled + } + }); + if (fqdn) { + await prisma.setting.update({ where: { id }, data: { fqdn } }); + } + await prisma.setting.update({ where: { id }, data: { proxyDefaultRedirect } }); + if (minPort && maxPort) { + await prisma.setting.update({ where: { id }, data: { minPort, maxPort } }); + } + if (doNotTrack === false) { + // Sentry.init({ + // dsn: sentryDSN, + // environment: isDev ? 'development' : 'production', + // release: version + // }); + // console.log('Sentry initialized') + } + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } export async function deleteDomain(request: FastifyRequest, reply: FastifyReply) { - try { - const { fqdn } = request.body - const { DNSServers } = await listSettings(); - if (DNSServers) { - dns.setServers([...DNSServers.split(',')]); - } - let ip; - try { - ip = await dns.resolve(fqdn); - } catch (error) { - // Do not care. - } - await prisma.setting.update({ where: { fqdn }, data: { fqdn: null } }); - return reply.redirect(302, ip ? `http://${ip[0]}:3000/settings` : undefined) - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + try { + const { fqdn } = request.body; + const { DNSServers } = await listSettings(); + if (DNSServers) { + dns.setServers([...DNSServers.split(',')]); + } + let ip; + try { + ip = await dns.resolve(fqdn); + } catch (error) { + // Do not care. + } + await prisma.setting.update({ where: { fqdn }, data: { fqdn: null } }); + return reply.redirect(302, ip ? 
`http://${ip[0]}:3000/settings` : undefined); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } export async function checkDomain(request: FastifyRequest) { - try { - const { id } = request.params; - let { fqdn, forceSave, dualCerts, isDNSCheckEnabled } = request.body - if (fqdn) fqdn = fqdn.toLowerCase(); - const found = await isDomainConfigured({ id, fqdn }); - if (found) { - throw { message: "Domain already configured" }; - } - if (isDNSCheckEnabled && !forceSave && !isDev) { - const hostname = request.hostname.split(':')[0] - return await checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }); - } - return {}; - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + try { + const { id } = request.params; + let { fqdn, forceSave, dualCerts, isDNSCheckEnabled } = request.body; + if (fqdn) fqdn = fqdn.toLowerCase(); + const found = await isDomainConfigured({ id, fqdn }); + if (found) { + throw { message: 'Domain already configured' }; + } + if (isDNSCheckEnabled && !forceSave && !isDev) { + const hostname = request.hostname.split(':')[0]; + return await checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }); + } + return {}; + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } export async function checkDNS(request: FastifyRequest) { - try { - const { domain } = request.params; - await isDNSValid(request.hostname, domain); - return {} - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + try { + const { domain } = request.params; + await isDNSValid(request.hostname, domain); + return {}; + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } export async function saveSSHKey(request: FastifyRequest, reply: FastifyReply) { - try { - const teamId = request.user.teamId; - const { privateKey, name } = request.body; - const found = await prisma.sshKey.findMany({ where: { name } }) - if (found.length > 0) { - throw { - message: "Name already used. Choose another one please." - } - } - const encryptedSSHKey = encrypt(privateKey) - await prisma.sshKey.create({ data: { name, privateKey: encryptedSSHKey, team: { connect: { id: teamId } } } }) - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + try { + const teamId = request.user.teamId; + const { privateKey, name } = request.body; + const found = await prisma.sshKey.findMany({ where: { name } }); + if (found.length > 0) { + throw { + message: 'Name already used. Choose another one please.' 
+ }; + } + const encryptedSSHKey = encrypt(privateKey); + await prisma.sshKey.create({ + data: { name, privateKey: encryptedSSHKey, team: { connect: { id: teamId } } } + }); + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } export async function deleteSSHKey(request: FastifyRequest, reply: FastifyReply) { - try { - const teamId = request.user.teamId; - const { id } = request.body; - await prisma.sshKey.deleteMany({ where: { id, teamId } }) - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + try { + const teamId = request.user.teamId; + const { id } = request.body; + await prisma.sshKey.deleteMany({ where: { id, teamId } }); + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } -export async function deleteCertificates(request: FastifyRequest, reply: FastifyReply) { - try { - const teamId = request.user.teamId; - const { id } = request.body; - await executeCommand({ command: `docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`, shell: true }) - await prisma.certificate.deleteMany({ where: { id, teamId } }) - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } +export async function deleteCertificates( + request: FastifyRequest, + reply: FastifyReply +) { + try { + const teamId = request.user.teamId; + const { id } = request.body; + await executeCommand({ + command: `docker exec coolify-proxy sh -c 'rm -f /etc/traefik/acme/custom/${id}-key.pem /etc/traefik/acme/custom/${id}-cert.pem'`, + shell: true + }); + await prisma.certificate.deleteMany({ where: { id, teamId } }); + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } -export async function setDockerRegistry(request: FastifyRequest, reply: FastifyReply) { - try { - const teamId = request.user.teamId; - const { id, username, password } = request.body; +export async function setDockerRegistry( + request: FastifyRequest, + reply: FastifyReply +) { + try { + const teamId = request.user.teamId; + const { id, username, password } = request.body; - let encryptedPassword = '' - if (password) encryptedPassword = encrypt(password) + let encryptedPassword = ''; + if (password) encryptedPassword = encrypt(password); - if (teamId === '0') { - await prisma.dockerRegistry.update({ where: { id }, data: { username, password: encryptedPassword } }) - } else { - await prisma.dockerRegistry.updateMany({ where: { id, teamId }, data: { username, password: encryptedPassword } }) - } - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + if (teamId === '0') { + await prisma.dockerRegistry.update({ + where: { id }, + data: { username, password: encryptedPassword } + }); + } else { + await prisma.dockerRegistry.updateMany({ + where: { id, teamId }, + data: { username, password: encryptedPassword } + }); + } + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } -export async function addDockerRegistry(request: FastifyRequest, reply: FastifyReply) { - try { - const teamId = request.user.teamId; - const { name, url, username, password } = request.body; +export async function addDockerRegistry( + request: FastifyRequest, + reply: FastifyReply +) { + try { + const teamId 
= request.user.teamId; + const { name, url, username, password } = request.body; - let encryptedPassword = '' - if (password) encryptedPassword = encrypt(password) - await prisma.dockerRegistry.create({ data: { name, url, username, password: encryptedPassword, team: { connect: { id: teamId } } } }) + let encryptedPassword = ''; + if (password) encryptedPassword = encrypt(password); + await prisma.dockerRegistry.create({ + data: { name, url, username, password: encryptedPassword, team: { connect: { id: teamId } } } + }); - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } +} +export async function deleteDockerRegistry( + request: FastifyRequest, + reply: FastifyReply +) { + try { + const teamId = request.user.teamId; + const { id } = request.body; + await prisma.application.updateMany({ + where: { dockerRegistryId: id }, + data: { dockerRegistryId: null } + }); + await prisma.dockerRegistry.deleteMany({ where: { id, teamId } }); + return reply.code(201).send(); + } catch ({ status, message }) { + return errorHandler({ status, message }); + } } -export async function deleteDockerRegistry(request: FastifyRequest, reply: FastifyReply) { - try { - const teamId = request.user.teamId; - const { id } = request.body; - await prisma.application.updateMany({ where: { dockerRegistryId: id }, data: { dockerRegistryId: null } }) - await prisma.dockerRegistry.deleteMany({ where: { id, teamId } }) - return reply.code(201).send() - } catch ({ status, message }) { - return errorHandler({ status, message }) - } -} \ No newline at end of file diff --git a/apps/ui/package.json b/apps/ui/package.json index d40dba2a0..1cadeffd8 100644 --- a/apps/ui/package.json +++ b/apps/ui/package.json @@ -42,8 +42,6 @@ }, "type": "module", "dependencies": { - "@sentry/svelte": "7.21.1", - "@sentry/tracing": "7.21.1", "@sveltejs/adapter-static": "1.0.0-next.48", "@tailwindcss/typography": "0.5.8", "cuid": "2.1.8", diff --git a/apps/ui/src/hooks.ts b/apps/ui/src/hooks.ts index df3284d5a..6723af14f 100644 --- a/apps/ui/src/hooks.ts +++ b/apps/ui/src/hooks.ts @@ -1,13 +1,10 @@ -import * as Sentry from '@sentry/svelte'; export async function handle({ event, resolve }) { - const response = await resolve(event, { ssr: false }); - return response; + const response = await resolve(event, { ssr: false }); + return response; } export const handleError = ({ error, event }) => { - Sentry.captureException(error, { event }); - - return { - message: 'Whoops!', - code: error?.code ?? 'UNKNOWN' - }; -}; \ No newline at end of file + return { + message: 'Whoops!', + code: error?.code ?? 
'UNKNOWN' + }; +}; diff --git a/apps/ui/src/lib/api.ts b/apps/ui/src/lib/api.ts index 058ec083f..9373641fe 100644 --- a/apps/ui/src/lib/api.ts +++ b/apps/ui/src/lib/api.ts @@ -1,5 +1,6 @@ import { dev } from '$app/env'; import Cookies from 'js-cookie'; +import { dashify } from './common'; export function getAPIUrl() { if (GITPOD_WORKSPACE_URL) { @@ -100,6 +101,14 @@ async function send({ responseData = await response.json(); } else if (contentType?.indexOf('text/plain') !== -1) { responseData = await response.text(); + } else if (contentType?.indexOf('application/octet-stream') !== -1) { + responseData = await response.blob(); + const fileName = dashify(data.id + '-' + data.name) + const fileLink = document.createElement('a'); + fileLink.href = URL.createObjectURL(new Blob([responseData])) + fileLink.download = fileName + '.gz'; + fileLink.click(); + fileLink.remove(); } else { return {}; } diff --git a/apps/ui/src/routes/__layout.svelte b/apps/ui/src/routes/__layout.svelte index 4e68957b2..1c432fd5a 100644 --- a/apps/ui/src/routes/__layout.svelte +++ b/apps/ui/src/routes/__layout.svelte @@ -65,7 +65,6 @@
@@ -144,6 +163,19 @@ class:bg-databases={!loading.main} disabled={loading.main}>{$t('forms.save')} + {#if database.type !== 'redis' && database.type !== 'edgedb'} + {#if $status.database.isRunning} + + {:else} + + {/if} + {/if} {/if}
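The saveSettings() hunk above carries the most involved logic in this file: when previewSeparator changes, every existing preview deployment's customDomain is rebuilt so it keeps the protocol and pull-request id but swaps in the new separator (an empty separator falls back to '.'). A small TypeScript sketch of that rewrite; getDomainHost() is a hypothetical stand-in for the getDomain() helper imported from lib/common, whose implementation is not part of this diff:

// Mirrors the rewrite loop in saveSettings(): keep the protocol and PR id,
// swap the separator, reuse the application's base domain.
function getDomainHost(fqdn: string): string {
  // hypothetical stand-in for getDomain(): strip the protocol, keep the host
  return fqdn.replace(/^https?:\/\//, '');
}

function rebuildPreviewDomain(
  customDomain: string,       // e.g. 'https://123.app.example.com'
  pullmergeRequestId: string, // e.g. '123'
  applicationFqdn: string,    // e.g. 'https://app.example.com'
  previewSeparator: string    // e.g. '-' ('' falls back to '.')
): string {
  const { protocol } = new URL(customDomain); // 'https:'
  const separator = previewSeparator === '' ? '.' : previewSeparator;
  return `${protocol}//${pullmergeRequestId}${separator}${getDomainHost(applicationFqdn)}`;
}

// rebuildPreviewDomain('https://123.app.example.com', '123', 'https://app.example.com', '-')
// => 'https://123-app.example.com'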
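The application/octet-stream branch added to send() in apps/ui/src/lib/api.ts above turns a binary response into a client-side download saved with a .gz extension. A minimal sketch of the same flow, assuming a browser environment; slugify() stands in for the dashify helper that the diff imports from './common' but does not define:

// Sketch of the new download path: read the body as a Blob, build a file
// name from the resource id and name, and trigger the browser's save dialog
// through a temporary anchor element.
function slugify(value: string): string {
  // hypothetical stand-in for dashify()
  return value.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/(^-|-$)/g, '');
}

async function downloadOctetStream(response: Response, id: string, name: string): Promise<void> {
  const blob = await response.blob();
  const fileName = slugify(`${id}-${name}`);

  const fileLink = document.createElement('a');
  fileLink.href = URL.createObjectURL(blob);
  fileLink.download = `${fileName}.gz`;
  fileLink.click();
  fileLink.remove();
}

As in the diff, the object URL is never revoked; for a one-off download that is acceptable, though calling URL.revokeObjectURL(fileLink.href) after click() would release the memory sooner.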
diff --git a/apps/ui/src/routes/destinations/[id]/_NewRemoteDocker.svelte b/apps/ui/src/routes/destinations/[id]/_NewRemoteDocker.svelte index 5b569ab63..2944506fa 100644 --- a/apps/ui/src/routes/destinations/[id]/_NewRemoteDocker.svelte +++ b/apps/ui/src/routes/destinations/[id]/_NewRemoteDocker.svelte @@ -34,14 +34,20 @@ customClass="max-w-[32rem]" text="Remote Docker Engines are using SSH to communicate with the remote docker engine. You need to setup an SSH key in advance on the server and install Docker. -
See docs for more details." +
See docs for more details." />
-
+
{$t('forms.configuration')}
- + Force Delete +
{/if} diff --git a/apps/ui/src/routes/sources/[id]/_Gitlab.svelte b/apps/ui/src/routes/sources/[id]/_Gitlab.svelte index 0e9d616e3..47e051def 100644 --- a/apps/ui/src/routes/sources/[id]/_Gitlab.svelte +++ b/apps/ui/src/routes/sources/[id]/_Gitlab.svelte @@ -52,7 +52,7 @@ appSecret: source.gitlabApp.appSecret, groupName: source.gitlabApp.groupName, customPort: source.customPort, - customUser: source.customUser, + customUser: source.customUser }); const from = $page.url.searchParams.get('from'); if (from) { @@ -169,8 +169,8 @@
{#if !source.gitlabAppId} Documentation and detailed instructions. diff --git a/package.json b/package.json index c4ce72c9d..c92d76f67 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "coolify", "description": "An open-source & self-hostable Heroku / Netlify alternative.", - "version": "3.12.31", + "version": "3.12.34", "license": "Apache-2.0", "repository": "github:coollabsio/coolify", "scripts": { @@ -32,7 +32,7 @@ "build:api": "NODE_ENV=production pnpm run --filter api build", "build:ui": "NODE_ENV=production pnpm run --filter ui build", "dockerlogin": "echo $DOCKER_PASS | docker login --username=$DOCKER_USER --password-stdin", - "release:staging:amd": "docker build -t ghcr.io/coollabsio/coolify:next . && docker push ghcr.io/coollabsio/coolify:next", + "release:staging:amd": "docker build -t ghcr.io/coollabsio/coolify:v3 . && docker push ghcr.io/coollabsio/coolify:v3", "release:local": "rm -fr ./local-serve && mkdir ./local-serve && pnpm build && cp -Rp apps/api/build/* ./local-serve && cp -Rp apps/ui/build/ ./local-serve/public && cp -Rp apps/api/prisma/ ./local-serve/prisma && cp -Rp apps/api/package.json ./local-serve && env | grep '^COOLIFY_' > ./local-serve/.env && cd ./local-serve && pnpm install . && pnpm start" }, "devDependencies": { @@ -50,4 +50,4 @@ "open-source", "coolify" ] -} +} \ No newline at end of file
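A pattern worth noting across the settings handlers earlier in this diff: registry passwords and SSH private keys are written through encrypt() and read back through decrypt(), both imported from lib/common, so secrets never reach the database in plaintext. Those helpers are not shown here; the sketch below is a hypothetical AES-256-GCM pair that illustrates the same write-encrypted / read-decrypted shape, not the project's actual implementation:

import { createCipheriv, createDecipheriv, randomBytes } from 'node:crypto';

// Hypothetical stand-ins for encrypt()/decrypt(); in practice the key would
// come from a persisted application secret, not be generated per process.
const key = randomBytes(32);

export function encryptSecret(plaintext: string): string {
  const iv = randomBytes(12);
  const cipher = createCipheriv('aes-256-gcm', key, iv);
  const encrypted = Buffer.concat([cipher.update(plaintext, 'utf8'), cipher.final()]);
  // Pack iv, auth tag and ciphertext together so decryptSecret() can undo it.
  return [iv, cipher.getAuthTag(), encrypted].map((b) => b.toString('hex')).join('.');
}

export function decryptSecret(payload: string): string {
  const [iv, tag, data] = payload.split('.').map((part) => Buffer.from(part, 'hex'));
  const decipher = createDecipheriv('aes-256-gcm', key, iv);
  decipher.setAuthTag(tag);
  return Buffer.concat([decipher.update(data), decipher.final()]).toString('utf8');
}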