mirror of https://github.com/ershisan99/coolify.git
synced 2026-01-31 21:02:04 +00:00
fix: cleanupStuckedContainers
843 apps/trpc-experimental/server/src/lib/buildPacks/common.ts (new file)
@@ -0,0 +1,843 @@
import {
  base64Encode,
  decrypt,
  encrypt,
  generateSecrets,
  generateTimestamp,
  getDomain,
  isARM,
  isDev,
  version
} from '../common';
import { promises as fs } from 'fs';
import { day } from '../dayjs';
import { prisma } from '../../prisma';
import { executeCommand } from '../executeCommand';

const staticApps = ['static', 'react', 'vuejs', 'svelte', 'gatsby', 'astro', 'eleventy'];
const nodeBased = [
  'react',
  'preact',
  'vuejs',
  'svelte',
  'gatsby',
  'astro',
  'eleventy',
  'node',
  'nestjs',
  'nuxtjs',
  'nextjs'
];

export function setDefaultBaseImage(
  buildPack: string | null,
  deploymentType: string | null = null
) {
  const nodeVersions = [
    { value: 'node:lts', label: 'node:lts' },
    { value: 'node:18', label: 'node:18' },
    { value: 'node:17', label: 'node:17' },
    { value: 'node:16', label: 'node:16' },
    { value: 'node:14', label: 'node:14' },
    { value: 'node:12', label: 'node:12' }
  ];
  const staticVersions = [
    { value: 'webdevops/nginx:alpine', label: 'webdevops/nginx:alpine' },
    { value: 'webdevops/apache:alpine', label: 'webdevops/apache:alpine' },
    { value: 'nginx:alpine', label: 'nginx:alpine' },
    { value: 'httpd:alpine', label: 'httpd:alpine (Apache)' }
  ];
  const rustVersions = [
    { value: 'rust:latest', label: 'rust:latest' },
    { value: 'rust:1.60', label: 'rust:1.60' },
    { value: 'rust:1.60-buster', label: 'rust:1.60-buster' },
    { value: 'rust:1.60-bullseye', label: 'rust:1.60-bullseye' },
    { value: 'rust:1.60-slim-buster', label: 'rust:1.60-slim-buster' },
    { value: 'rust:1.60-slim-bullseye', label: 'rust:1.60-slim-bullseye' },
    { value: 'rust:1.60-alpine3.14', label: 'rust:1.60-alpine3.14' },
    { value: 'rust:1.60-alpine3.15', label: 'rust:1.60-alpine3.15' }
  ];
  const phpVersions = [
    { value: 'webdevops/php-apache:8.2', label: 'webdevops/php-apache:8.2' },
    { value: 'webdevops/php-nginx:8.2', label: 'webdevops/php-nginx:8.2' },
    { value: 'webdevops/php-apache:8.1', label: 'webdevops/php-apache:8.1' },
    { value: 'webdevops/php-nginx:8.1', label: 'webdevops/php-nginx:8.1' },
    { value: 'webdevops/php-apache:8.0', label: 'webdevops/php-apache:8.0' },
    { value: 'webdevops/php-nginx:8.0', label: 'webdevops/php-nginx:8.0' },
    { value: 'webdevops/php-apache:7.4', label: 'webdevops/php-apache:7.4' },
    { value: 'webdevops/php-nginx:7.4', label: 'webdevops/php-nginx:7.4' },
    { value: 'webdevops/php-apache:7.3', label: 'webdevops/php-apache:7.3' },
    { value: 'webdevops/php-nginx:7.3', label: 'webdevops/php-nginx:7.3' },
    { value: 'webdevops/php-apache:7.2', label: 'webdevops/php-apache:7.2' },
    { value: 'webdevops/php-nginx:7.2', label: 'webdevops/php-nginx:7.2' },
    { value: 'webdevops/php-apache:7.1', label: 'webdevops/php-apache:7.1' },
    { value: 'webdevops/php-nginx:7.1', label: 'webdevops/php-nginx:7.1' },
    { value: 'webdevops/php-apache:7.0', label: 'webdevops/php-apache:7.0' },
    { value: 'webdevops/php-nginx:7.0', label: 'webdevops/php-nginx:7.0' },
    { value: 'webdevops/php-apache:5.6', label: 'webdevops/php-apache:5.6' },
    { value: 'webdevops/php-nginx:5.6', label: 'webdevops/php-nginx:5.6' },
    { value: 'webdevops/php-apache:8.2-alpine', label: 'webdevops/php-apache:8.2-alpine' },
    { value: 'webdevops/php-nginx:8.2-alpine', label: 'webdevops/php-nginx:8.2-alpine' },
    { value: 'webdevops/php-apache:8.1-alpine', label: 'webdevops/php-apache:8.1-alpine' },
    { value: 'webdevops/php-nginx:8.1-alpine', label: 'webdevops/php-nginx:8.1-alpine' },
    { value: 'webdevops/php-apache:8.0-alpine', label: 'webdevops/php-apache:8.0-alpine' },
    { value: 'webdevops/php-nginx:8.0-alpine', label: 'webdevops/php-nginx:8.0-alpine' },
    { value: 'webdevops/php-apache:7.4-alpine', label: 'webdevops/php-apache:7.4-alpine' },
    { value: 'webdevops/php-nginx:7.4-alpine', label: 'webdevops/php-nginx:7.4-alpine' },
    { value: 'webdevops/php-apache:7.3-alpine', label: 'webdevops/php-apache:7.3-alpine' },
    { value: 'webdevops/php-nginx:7.3-alpine', label: 'webdevops/php-nginx:7.3-alpine' },
    { value: 'webdevops/php-apache:7.2-alpine', label: 'webdevops/php-apache:7.2-alpine' },
    { value: 'webdevops/php-nginx:7.2-alpine', label: 'webdevops/php-nginx:7.2-alpine' },
    { value: 'webdevops/php-apache:7.1-alpine', label: 'webdevops/php-apache:7.1-alpine' },
    { value: 'php:8.1-fpm', label: 'php:8.1-fpm' },
    { value: 'php:8.0-fpm', label: 'php:8.0-fpm' },
    { value: 'php:8.1-fpm-alpine', label: 'php:8.1-fpm-alpine' },
    { value: 'php:8.0-fpm-alpine', label: 'php:8.0-fpm-alpine' }
  ];
  const pythonVersions = [
    { value: 'python:3.10-alpine', label: 'python:3.10-alpine' },
    { value: 'python:3.10-buster', label: 'python:3.10-buster' },
    { value: 'python:3.10-bullseye', label: 'python:3.10-bullseye' },
    { value: 'python:3.10-slim-bullseye', label: 'python:3.10-slim-bullseye' },
    { value: 'python:3.9-alpine', label: 'python:3.9-alpine' },
    { value: 'python:3.9-buster', label: 'python:3.9-buster' },
    { value: 'python:3.9-bullseye', label: 'python:3.9-bullseye' },
    { value: 'python:3.9-slim-bullseye', label: 'python:3.9-slim-bullseye' },
    { value: 'python:3.8-alpine', label: 'python:3.8-alpine' },
    { value: 'python:3.8-buster', label: 'python:3.8-buster' },
    { value: 'python:3.8-bullseye', label: 'python:3.8-bullseye' },
    { value: 'python:3.8-slim-bullseye', label: 'python:3.8-slim-bullseye' },
    { value: 'python:3.7-alpine', label: 'python:3.7-alpine' },
    { value: 'python:3.7-buster', label: 'python:3.7-buster' },
    { value: 'python:3.7-bullseye', label: 'python:3.7-bullseye' },
    { value: 'python:3.7-slim-bullseye', label: 'python:3.7-slim-bullseye' }
  ];
  const herokuVersions = [
    { value: 'heroku/builder:22', label: 'heroku/builder:22' },
    { value: 'heroku/buildpacks:20', label: 'heroku/buildpacks:20' },
    { value: 'heroku/builder-classic:22', label: 'heroku/builder-classic:22' }
  ];
  let payload: any = {
    baseImage: null,
    baseBuildImage: null,
    baseImages: [],
    baseBuildImages: []
  };
  if (nodeBased.includes(buildPack)) {
    if (deploymentType === 'static') {
      payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
      payload.baseImages = isARM(process.arch)
        ? staticVersions.filter((version) => !version.value.includes('webdevops'))
        : staticVersions;
      payload.baseBuildImage = 'node:lts';
      payload.baseBuildImages = nodeVersions;
    } else {
      payload.baseImage = 'node:lts';
      payload.baseImages = nodeVersions;
      payload.baseBuildImage = 'node:lts';
      payload.baseBuildImages = nodeVersions;
    }
  }
  if (staticApps.includes(buildPack)) {
    payload.baseImage = isARM(process.arch) ? 'nginx:alpine' : 'webdevops/nginx:alpine';
    payload.baseImages = isARM(process.arch)
      ? staticVersions.filter((version) => !version.value.includes('webdevops'))
      : staticVersions;
    payload.baseBuildImage = 'node:lts';
    payload.baseBuildImages = nodeVersions;
  }
  if (buildPack === 'python') {
    payload.baseImage = 'python:3.10-alpine';
    payload.baseImages = pythonVersions;
  }
  if (buildPack === 'rust') {
    payload.baseImage = 'rust:latest';
    payload.baseBuildImage = 'rust:latest';
    payload.baseImages = rustVersions;
    payload.baseBuildImages = rustVersions;
  }
  if (buildPack === 'deno') {
    payload.baseImage = 'denoland/deno:latest';
  }
  if (buildPack === 'php') {
    payload.baseImage = isARM(process.arch)
      ? 'php:8.1-fpm-alpine'
      : 'webdevops/php-apache:8.2-alpine';
    payload.baseImages = isARM(process.arch)
      ? phpVersions.filter((version) => !version.value.includes('webdevops'))
      : phpVersions;
  }
  if (buildPack === 'laravel') {
    payload.baseImage = isARM(process.arch)
      ? 'php:8.1-fpm-alpine'
      : 'webdevops/php-apache:8.2-alpine';
    payload.baseImages = isARM(process.arch)
      ? phpVersions.filter((version) => !version.value.includes('webdevops'))
      : phpVersions;
    payload.baseBuildImage = 'node:18';
    payload.baseBuildImages = nodeVersions;
  }
  if (buildPack === 'heroku') {
    payload.baseImage = 'heroku/buildpacks:20';
    payload.baseImages = herokuVersions;
  }
  return payload;
}

export const setDefaultConfiguration = async (data: any) => {
  let {
    buildPack,
    port,
    installCommand,
    startCommand,
    buildCommand,
    publishDirectory,
    baseDirectory,
    dockerFileLocation,
    dockerComposeFileLocation,
    denoMainFile
  } = data;
  //@ts-ignore
  const template = scanningTemplates[buildPack];
  if (!port) {
    port = template?.port || 3000;

    if (buildPack === 'static') port = 80;
    else if (buildPack === 'node') port = 3000;
    else if (buildPack === 'php') port = 80;
    else if (buildPack === 'python') port = 8000;
  }
  if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
    installCommand = template?.installCommand || 'yarn install';
  if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
    startCommand = template?.startCommand || 'yarn start';
  if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
    buildCommand = template?.buildCommand || null;
  if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
  if (baseDirectory) {
    if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
    if (baseDirectory.endsWith('/') && baseDirectory !== '/')
      baseDirectory = baseDirectory.slice(0, -1);
  }
  if (dockerFileLocation) {
    if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
    if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
  } else {
    dockerFileLocation = '/Dockerfile';
  }
  if (dockerComposeFileLocation) {
    if (!dockerComposeFileLocation.startsWith('/'))
      dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
    if (dockerComposeFileLocation.endsWith('/'))
      dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
  } else {
    dockerComposeFileLocation = '/Dockerfile';
  }
  if (!denoMainFile) {
    denoMainFile = 'main.ts';
  }

  return {
    buildPack,
    port,
    installCommand,
    startCommand,
    buildCommand,
    publishDirectory,
    baseDirectory,
    dockerFileLocation,
    dockerComposeFileLocation,
    denoMainFile
  };
};

export const scanningTemplates = {
  '@sveltejs/kit': { buildPack: 'nodejs' },
  astro: { buildPack: 'astro' },
  '@11ty/eleventy': { buildPack: 'eleventy' },
  svelte: { buildPack: 'svelte' },
  '@nestjs/core': { buildPack: 'nestjs' },
  next: { buildPack: 'nextjs' },
  nuxt: { buildPack: 'nuxtjs' },
  'react-scripts': { buildPack: 'react' },
  'parcel-bundler': { buildPack: 'static' },
  '@vue/cli-service': { buildPack: 'vuejs' },
  vuejs: { buildPack: 'vuejs' },
  gatsby: { buildPack: 'gatsby' },
  'preact-cli': { buildPack: 'react' }
};

export const saveBuildLog = async ({
  line,
  buildId,
  applicationId
}: {
  line: string;
  buildId: string;
  applicationId: string;
}): Promise<any> => {
  if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
  if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
  const { default: got } = await import('got');
  if (typeof line === 'object' && line) {
    if (line.shortMessage) {
      line = line.shortMessage + '\n' + line.stderr;
    } else {
      line = JSON.stringify(line);
    }
  }
  if (line && typeof line === 'string' && line.includes('ghs_')) {
    const regex = /ghs_.*@/g;
    line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
  }
  const addTimestamp = `[${generateTimestamp()}] ${line}`;
  const fluentBitUrl = isDev
    ? process.env.COOLIFY_CONTAINER_DEV === 'true'
      ? 'http://coolify-fluentbit:24224'
      : 'http://localhost:24224'
    : 'http://coolify-fluentbit:24224';

  if (isDev && !process.env.COOLIFY_CONTAINER_DEV) {
    console.debug(`[${applicationId}] ${addTimestamp}`);
  }
  try {
    return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
      json: {
        line: encrypt(line)
      }
    });
  } catch (error) {
    return await prisma.buildLog.create({
      data: {
        line: addTimestamp,
        buildId,
        time: Number(day().valueOf()),
        applicationId
      }
    });
  }
};

export async function copyBaseConfigurationFiles(
  buildPack,
  workdir,
  buildId,
  applicationId,
  baseImage
) {
  try {
    if (buildPack === 'php') {
      await fs.writeFile(`${workdir}/entrypoint.sh`, `chown -R 1000 /app`);
      await saveBuildLog({
        line: 'Copied default configuration file for PHP.',
        buildId,
        applicationId
      });
    } else if (baseImage?.includes('nginx')) {
      await fs.writeFile(
        `${workdir}/nginx.conf`,
        `user nginx;
worker_processes auto;

error_log /docker.stdout;
pid /run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
    '$status $body_bytes_sent "$http_referer" '
    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /docker.stdout main;

    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    server {
        listen 80;
        server_name localhost;

        location / {
            root /app;
            index index.html;
            try_files $uri $uri/index.html $uri/ /index.html =404;
        }

        error_page 404 /50x.html;

        # redirect server error pages to the static page /50x.html
        #
        error_page 500 502 503 504 /50x.html;
        location = /50x.html {
            root /app;
        }

    }

}
`
      );
    }
    // TODO: Add more configuration files for other buildpacks, like apache2, etc.
  } catch (error) {
    throw new Error(error);
  }
}

export function checkPnpm(installCommand = null, buildCommand = null, startCommand = null) {
  return (
    installCommand?.includes('pnpm') ||
    buildCommand?.includes('pnpm') ||
    startCommand?.includes('pnpm')
  );
}

export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
  if (!username || !password) {
    return null;
  }

  let decryptedPassword = decrypt(password);
  const location = `${workdir}/.docker`;

  try {
    await fs.mkdir(`${workdir}/.docker`);
  } catch (error) {
    // console.log(error);
  }
  const payload = JSON.stringify({
    auths: {
      [url]: {
        auth: Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
      }
    }
  });
  await fs.writeFile(`${location}/config.json`, payload);
  return location;
}
export async function buildImage({
  applicationId,
  tag,
  workdir,
  buildId,
  dockerId,
  isCache = false,
  debug = false,
  dockerFileLocation = '/Dockerfile',
  commit,
  forceRebuild = false
}) {
  if (isCache) {
    await saveBuildLog({ line: `Building cache image...`, buildId, applicationId });
  } else {
    await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
  }
  const dockerFile = isCache ? `${dockerFileLocation}-cache` : `${dockerFileLocation}`;
  const cache = `${applicationId}:${tag}${isCache ? '-cache' : ''}`;
  let location = null;

  const { dockerRegistry } = await prisma.application.findUnique({
    where: { id: applicationId },
    select: { dockerRegistry: true }
  });
  if (dockerRegistry) {
    const { url, username, password } = dockerRegistry;
    location = await saveDockerRegistryCredentials({ url, username, password, workdir });
  }

  await executeCommand({
    stream: true,
    debug,
    buildId,
    applicationId,
    dockerId,
    command: `docker ${location ? `--config ${location}` : ''} build ${
      forceRebuild ? '--no-cache' : ''
    } --progress plain -f ${workdir}/${dockerFile} -t ${cache} --build-arg SOURCE_COMMIT=${commit} ${workdir}`
  });

  const { status } = await prisma.build.findUnique({ where: { id: buildId } });
  if (status === 'canceled') {
    throw new Error('Canceled.');
  }
}
export function makeLabelForSimpleDockerfile({ applicationId, port, type }) {
  return [
    'coolify.managed=true',
    `coolify.version=${version}`,
    `coolify.applicationId=${applicationId}`,
    `coolify.type=standalone-application`
  ];
}
export function makeLabelForStandaloneApplication({
  applicationId,
  fqdn,
  name,
  type,
  pullmergeRequestId = null,
  buildPack,
  repository,
  branch,
  projectId,
  port,
  commit,
  installCommand,
  buildCommand,
  startCommand,
  baseDirectory,
  publishDirectory
}) {
  if (pullmergeRequestId) {
    const protocol = fqdn.startsWith('https://') ? 'https' : 'http';
    const domain = getDomain(fqdn);
    fqdn = `${protocol}://${pullmergeRequestId}.${domain}`;
  }
  return [
    'coolify.managed=true',
    `coolify.version=${version}`,
    `coolify.applicationId=${applicationId}`,
    `coolify.type=standalone-application`,
    `coolify.name=${name}`,
    `coolify.configuration=${base64Encode(
      JSON.stringify({
        applicationId,
        fqdn,
        name,
        type,
        pullmergeRequestId,
        buildPack,
        repository,
        branch,
        projectId,
        port,
        commit,
        installCommand,
        buildCommand,
        startCommand,
        baseDirectory,
        publishDirectory
      })
    )}`
  ];
}

export async function buildCacheImageWithNode(data, imageForBuild) {
  const {
    workdir,
    buildId,
    baseDirectory,
    installCommand,
    buildCommand,
    secrets,
    pullmergeRequestId
  } = data;
  const isPnpm = checkPnpm(installCommand, buildCommand);
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${imageForBuild}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  if (installCommand) {
    Dockerfile.push(`RUN ${installCommand}`);
  }
  Dockerfile.push(`RUN ${buildCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  await buildImage({ ...data, isCache: true });
}

export async function buildCacheImageForLaravel(data, imageForBuild) {
  const { workdir, buildId, secrets, pullmergeRequestId } = data;
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${imageForBuild}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  Dockerfile.push(`COPY *.json *.mix.js /app/`);
  Dockerfile.push(`COPY resources /app/resources`);
  Dockerfile.push(`RUN yarn install && yarn production`);
  await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  await buildImage({ ...data, isCache: true });
}

export async function buildCacheImageWithCargo(data, imageForBuild) {
  const { applicationId, workdir, buildId } = data;

  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${imageForBuild} as planner-${applicationId}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push('RUN cargo install cargo-chef');
  Dockerfile.push('COPY . .');
  Dockerfile.push('RUN cargo chef prepare --recipe-path recipe.json');
  Dockerfile.push(`FROM ${imageForBuild}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push('RUN cargo install cargo-chef');
  Dockerfile.push(`COPY --from=planner-${applicationId} /app/recipe.json recipe.json`);
  Dockerfile.push('RUN cargo chef cook --release --recipe-path recipe.json');
  await fs.writeFile(`${workdir}/Dockerfile-cache`, Dockerfile.join('\n'));
  await buildImage({ ...data, isCache: true });
}
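For orientation, a minimal sketch of how the two defaults helpers above could be combined when preparing an application for deployment. It is not part of this commit; the `application` record and the fallback logic are assumptions made purely for illustration.

// Illustrative sketch only (not in this diff). Assumes an `application` object
// carrying buildPack/deploymentType plus the fields setDefaultConfiguration() reads.
import { setDefaultBaseImage, setDefaultConfiguration } from './common';

async function prepareApplicationDefaults(application: any) {
  // Pick default base/build images for the chosen build pack.
  const { baseImage, baseBuildImage } = setDefaultBaseImage(
    application.buildPack,
    application.deploymentType
  );
  // Normalize ports, commands and file locations.
  const configuration = await setDefaultConfiguration(application);
  return {
    ...configuration,
    baseImage: application.baseImage || baseImage,
    baseBuildImage: application.baseBuildImage || baseBuildImage
  };
}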
137 apps/trpc-experimental/server/src/lib/buildPacks/compose.ts (new file)
@@ -0,0 +1,137 @@
import { promises as fs } from 'fs';
import { saveBuildLog } from './common';
import yaml from 'js-yaml';
import { generateSecrets } from '../common';
import { defaultComposeConfiguration } from '../docker';
import { executeCommand } from '../executeCommand';

export default async function (data) {
  let {
    applicationId,
    debug,
    buildId,
    dockerId,
    network,
    volumes,
    labels,
    workdir,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    dockerComposeConfiguration,
    dockerComposeFileLocation
  } = data;
  const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
  const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
  const dockerComposeYaml = yaml.load(dockerComposeRaw);
  if (!dockerComposeYaml.services) {
    throw 'No Services found in docker-compose file.';
  }
  let envs = [];
  let buildEnvs = [];
  if (secrets.length > 0) {
    envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
    buildEnvs = [...buildEnvs, ...generateSecrets(secrets, pullmergeRequestId, true, null, true)];
  }

  const composeVolumes = [];
  if (volumes.length > 0) {
    for (const volume of volumes) {
      let [v, path] = volume.split(':');
      composeVolumes[v] = {
        name: v
      };
    }
  }

  let networks = {};
  for (let [key, value] of Object.entries(dockerComposeYaml.services)) {
    value['container_name'] = `${applicationId}-${key}`;

    let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
    if (Object.keys(environment).length > 0) {
      environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`);
    }
    value['environment'] = [...environment, ...envs];

    let build = typeof value['build'] === 'undefined' ? [] : value['build'];
    if (typeof build === 'string') {
      build = { context: build };
    }
    const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
    let finalArgs = [...buildEnvs];
    if (Object.keys(buildArgs).length > 0) {
      for (const arg of buildArgs) {
        const [key, _] = arg.split('=');
        if (finalArgs.filter((env) => env.startsWith(key)).length === 0) {
          finalArgs.push(arg);
        }
      }
    }
    value['build'] = {
      ...build,
      args: finalArgs
    };

    value['labels'] = labels;
    // TODO: If we support separated volume for each service, we need to add it here
    if (value['volumes']?.length > 0) {
      value['volumes'] = value['volumes'].map((volume) => {
        let [v, path, permission] = volume.split(':');
        if (!path) {
          path = v;
          v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
        } else {
          v = `${applicationId}${v.replace(/\//gi, '-').replace(/\./gi, '')}`;
        }
        composeVolumes[v] = {
          name: v
        };
        return `${v}:${path}${permission ? ':' + permission : ''}`;
      });
    }
    if (volumes.length > 0) {
      for (const volume of volumes) {
        value['volumes'].push(volume);
      }
    }
    if (dockerComposeConfiguration[key].port) {
      value['expose'] = [dockerComposeConfiguration[key].port];
    }
    if (value['networks']?.length > 0) {
      value['networks'].forEach((network) => {
        networks[network] = {
          name: network
        };
      });
    }
    value['networks'] = [...(value['networks'] || ''), network];
    dockerComposeYaml.services[key] = {
      ...dockerComposeYaml.services[key],
      restart: defaultComposeConfiguration(network).restart,
      deploy: defaultComposeConfiguration(network).deploy
    };
  }
  if (Object.keys(composeVolumes).length > 0) {
    dockerComposeYaml['volumes'] = { ...composeVolumes };
  }
  dockerComposeYaml['networks'] = Object.assign({ ...networks }, { [network]: { external: true } });

  await fs.writeFile(fileYaml, yaml.dump(dockerComposeYaml));
  await executeCommand({
    debug,
    buildId,
    applicationId,
    dockerId,
    command: `docker compose --project-directory ${workdir} -f ${fileYaml} pull`
  });
  await saveBuildLog({ line: 'Pulling images from Compose file...', buildId, applicationId });
  await executeCommand({
    debug,
    buildId,
    applicationId,
    dockerId,
    command: `docker compose --project-directory ${workdir} -f ${fileYaml} build --progress plain`
  });
  await saveBuildLog({ line: 'Building images from Compose file...', buildId, applicationId });
}
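To make the expected input of this Compose build pack concrete, here is a hedged sketch of the `data` object it destructures. The field names come from the destructuring at the top of compose.ts; every value below is invented for the example and is not taken from this commit.

// Illustrative call only, inside some async context; all values are hypothetical.
import compose from './compose';

await compose({
  applicationId: 'my-app',
  debug: false,
  buildId: 'build-1',
  dockerId: 'destination-docker-1',
  network: 'coolify',                                  // external network each service joins
  volumes: ['mydata:/var/lib/data'],                   // extra volumes appended to every service
  labels: ['coolify.managed=true'],
  workdir: '/tmp/build',
  baseDirectory: '',
  secrets: [],
  pullmergeRequestId: null,
  dockerComposeConfiguration: { web: { port: 3000 } }, // per-service expose override
  dockerComposeFileLocation: '/docker-compose.yml'
});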
52 apps/trpc-experimental/server/src/lib/buildPacks/deno.ts (new file)
@@ -0,0 +1,52 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    workdir,
    port,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    denoMainFile,
    denoOptions,
    buildId
  } = data;
  const Dockerfile: Array<string> = [];

  let depsFound = false;
  try {
    await fs.readFile(`${workdir}${baseDirectory || ''}/deps.ts`);
    depsFound = true;
  } catch (error) {}

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (depsFound) {
    Dockerfile.push(`COPY .${baseDirectory || ''}/deps.ts /app`);
    Dockerfile.push(`RUN deno cache deps.ts`);
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`RUN deno cache ${denoMainFile}`);
  Dockerfile.push(`ENV NO_COLOR true`);
  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD deno run ${denoOptions || ''} ${denoMainFile}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
27 apps/trpc-experimental/server/src/lib/buildPacks/docker.ts (new file)
@@ -0,0 +1,27 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';

export default async function (data) {
  let { workdir, buildId, baseDirectory, secrets, pullmergeRequestId, dockerFileLocation } = data;
  const file = `${workdir}${baseDirectory}${dockerFileLocation}`;
  data.workdir = `${workdir}${baseDirectory}`;
  const DockerfileRaw = await fs.readFile(`${file}`, 'utf8');
  const Dockerfile: Array<string> = DockerfileRaw.toString().trim().split('\n');
  Dockerfile.forEach((line, index) => {
    if (line.startsWith('FROM')) {
      Dockerfile.splice(index + 1, 0, `LABEL coolify.buildId=${buildId}`);
    }
  });
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.forEach((line, index) => {
        if (line.startsWith('FROM')) {
          Dockerfile.splice(index + 1, 0, env);
        }
      });
    });
  }
  await fs.writeFile(`${data.workdir}${dockerFileLocation}`, Dockerfile.join('\n'));
  await buildImage(data);
}
28 apps/trpc-experimental/server/src/lib/buildPacks/gatsby.ts (new file)
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, imageforBuild): Promise<void> => {
  const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
  const Dockerfile: Array<string> = [];

  Dockerfile.push(`FROM ${imageforBuild}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
  if (baseImage?.includes('nginx')) {
    Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await buildCacheImageWithNode(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
17 apps/trpc-experimental/server/src/lib/buildPacks/heroku.ts (new file)
@@ -0,0 +1,17 @@
import { executeCommand } from "../executeCommand";
import { saveBuildLog } from "./common";

export default async function (data: any): Promise<void> {
  const { buildId, applicationId, tag, dockerId, debug, workdir, baseDirectory, baseImage } = data
  try {
    await saveBuildLog({ line: `Building production image...`, buildId, applicationId });
    await executeCommand({
      buildId,
      debug,
      dockerId,
      command: `pack build -p ${workdir}${baseDirectory} ${applicationId}:${tag} --builder ${baseImage}`
    })
  } catch (error) {
    throw error;
  }
}
41 apps/trpc-experimental/server/src/lib/buildPacks/index.ts (new file)
@@ -0,0 +1,41 @@
import node from './node';
import staticApp from './static';
import docker from './docker';
import gatsby from './gatsby';
import svelte from './svelte';
import react from './react';
import nestjs from './nestjs';
import nextjs from './nextjs';
import nuxtjs from './nuxtjs';
import vuejs from './vuejs';
import php from './php';
import rust from './rust';
import astro from './static';
import eleventy from './static';
import python from './python';
import deno from './deno';
import laravel from './laravel';
import heroku from './heroku';
import compose from './compose';

export {
  node,
  staticApp,
  docker,
  gatsby,
  svelte,
  react,
  nestjs,
  nextjs,
  nuxtjs,
  vuejs,
  php,
  rust,
  astro,
  eleventy,
  python,
  deno,
  laravel,
  heroku,
  compose
};
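This barrel file is presumably consumed elsewhere in the server when a build is executed. A possible lookup is sketched below; only the export names (note `staticApp` standing in for the 'static' build pack) come from index.ts above, while the dispatcher itself is an assumption for illustration.

// Hypothetical dispatcher, not part of this commit.
import * as buildPacks from './index';

async function executeBuildPack(name: string, data: any): Promise<void> {
  const key = name === 'static' ? 'staticApp' : name;
  const handler = (buildPacks as Record<string, (data: any) => Promise<void>>)[key];
  if (!handler) throw new Error(`Unknown build pack: ${name}`);
  await handler(data);
}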
46 apps/trpc-experimental/server/src/lib/buildPacks/laravel.ts (new file)
@@ -0,0 +1,46 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageForLaravel, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const { workdir, applicationId, tag, buildId, port, secrets, pullmergeRequestId } = data;
  const Dockerfile: Array<string> = [];

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`ENV WEB_DOCUMENT_ROOT /app/public`);
  Dockerfile.push(`COPY --chown=application:application composer.* ./`);
  Dockerfile.push(`COPY --chown=application:application database/ database/`);
  Dockerfile.push(
    `RUN composer install --ignore-platform-reqs --no-interaction --no-plugins --no-scripts --prefer-dist`
  );
  Dockerfile.push(
    `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/js/ /app/public/js/`
  );
  Dockerfile.push(
    `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/public/css/ /app/public/css/`
  );
  Dockerfile.push(
    `COPY --chown=application:application --from=${applicationId}:${tag}-cache /app/mix-manifest.json /app/public/mix-manifest.json`
  );
  Dockerfile.push(`COPY --chown=application:application . ./`);
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  const { baseImage, baseBuildImage } = data;
  try {
    await buildCacheImageForLaravel(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
31 apps/trpc-experimental/server/src/lib/buildPacks/nestjs.ts (new file)
@@ -0,0 +1,31 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const { buildId, applicationId, tag, port, startCommand, workdir, baseDirectory } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = startCommand.includes('pnpm');

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${baseDirectory || ''} ./`);

  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD ${startCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await buildCacheImageWithNode(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
66 apps/trpc-experimental/server/src/lib/buildPacks/nextjs.ts (new file)
@@ -0,0 +1,66 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    applicationId,
    buildId,
    tag,
    workdir,
    publishDirectory,
    port,
    installCommand,
    buildCommand,
    startCommand,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    deploymentType,
    baseImage
  } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  if (deploymentType === 'node') {
    Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
    Dockerfile.push(`RUN ${installCommand}`);
    Dockerfile.push(`RUN ${buildCommand}`);
    Dockerfile.push(`EXPOSE ${port}`);
    Dockerfile.push(`CMD ${startCommand}`);
  } else if (deploymentType === 'static') {
    if (baseImage?.includes('nginx')) {
      Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
    }
    Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
    Dockerfile.push(`EXPOSE 80`);
  }

  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
    if (deploymentType === 'node') {
      await createDockerfile(data, baseImage);
      await buildImage(data);
    } else if (deploymentType === 'static') {
      if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
      await createDockerfile(data, baseImage);
      await buildImage(data);
    }
  } catch (error) {
    throw error;
  }
}
49 apps/trpc-experimental/server/src/lib/buildPacks/node.ts (new file)
@@ -0,0 +1,49 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage, checkPnpm } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    workdir,
    port,
    installCommand,
    buildCommand,
    startCommand,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    buildId
  } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`RUN ${installCommand}`);
  if (buildCommand) {
    Dockerfile.push(`RUN ${buildCommand}`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD ${startCommand}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage } = data;
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
66 apps/trpc-experimental/server/src/lib/buildPacks/nuxtjs.ts (new file)
@@ -0,0 +1,66 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage, checkPnpm } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    applicationId,
    buildId,
    tag,
    workdir,
    publishDirectory,
    port,
    installCommand,
    buildCommand,
    startCommand,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    deploymentType,
    baseImage
  } = data;
  const Dockerfile: Array<string> = [];
  const isPnpm = checkPnpm(installCommand, buildCommand, startCommand);
  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (isPnpm) {
    Dockerfile.push('RUN curl -f https://get.pnpm.io/v6.16.js | node - add --global pnpm@7');
  }
  if (deploymentType === 'node') {
    Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
    Dockerfile.push(`RUN ${installCommand}`);
    Dockerfile.push(`RUN ${buildCommand}`);
    Dockerfile.push(`EXPOSE ${port}`);
    Dockerfile.push(`CMD ${startCommand}`);
  } else if (deploymentType === 'static') {
    if (baseImage?.includes('nginx')) {
      Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
    }
    Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
    Dockerfile.push(`EXPOSE 80`);
  }

  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage, deploymentType, buildCommand } = data;
    if (deploymentType === 'node') {
      await createDockerfile(data, baseImage);
      await buildImage(data);
    } else if (deploymentType === 'static') {
      if (buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
      await createDockerfile(data, baseImage);
      await buildImage(data);
    }
  } catch (error) {
    throw error;
  }
}
50 apps/trpc-experimental/server/src/lib/buildPacks/php.ts (new file)
@@ -0,0 +1,50 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';

const createDockerfile = async (data, image, htaccessFound): Promise<void> => {
  const { workdir, baseDirectory, buildId, port, secrets, pullmergeRequestId } = data;
  const Dockerfile: Array<string> = [];
  let composerFound = false;
  try {
    await fs.readFile(`${workdir}${baseDirectory || ''}/composer.json`);
    composerFound = true;
  } catch (error) {}

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`COPY .${baseDirectory || ''} /app`);
  if (htaccessFound) {
    Dockerfile.push(`COPY .${baseDirectory || ''}/.htaccess ./`);
  }
  if (composerFound) {
    Dockerfile.push(`RUN composer install`);
  }

  Dockerfile.push(`COPY /entrypoint.sh /opt/docker/provision/entrypoint.d/30-entrypoint.sh`);
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  const { workdir, baseDirectory, baseImage } = data;
  try {
    let htaccessFound = false;
    try {
      await fs.readFile(`${workdir}${baseDirectory || ''}/.htaccess`);
      htaccessFound = true;
    } catch (e) {
      //
    }
    await createDockerfile(data, baseImage, htaccessFound);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
67 apps/trpc-experimental/server/src/lib/buildPacks/python.ts (new file)
@@ -0,0 +1,67 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    workdir,
    port,
    baseDirectory,
    secrets,
    pullmergeRequestId,
    pythonWSGI,
    pythonModule,
    pythonVariable,
    buildId
  } = data;
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (pythonWSGI?.toLowerCase() === 'gunicorn') {
    Dockerfile.push(`RUN pip install gunicorn`);
  } else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
    Dockerfile.push(`RUN pip install uvicorn`);
  } else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
    Dockerfile.push(`RUN apk add --no-cache uwsgi-python3`);
    // Dockerfile.push(`RUN pip install --no-cache-dir uwsgi`)
  }

  try {
    await fs.stat(`${workdir}${baseDirectory || ''}/requirements.txt`);
    Dockerfile.push(`COPY .${baseDirectory || ''}/requirements.txt ./`);
    Dockerfile.push(`RUN pip install --no-cache-dir -r .${baseDirectory || ''}/requirements.txt`);
  } catch (e) {
    //
  }
  Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  Dockerfile.push(`EXPOSE ${port}`);
  if (pythonWSGI?.toLowerCase() === 'gunicorn') {
    Dockerfile.push(`CMD gunicorn -w=4 -b=0.0.0.0:8000 ${pythonModule}:${pythonVariable}`);
  } else if (pythonWSGI?.toLowerCase() === 'uvicorn') {
    Dockerfile.push(`CMD uvicorn ${pythonModule}:${pythonVariable} --port ${port} --host 0.0.0.0`);
  } else if (pythonWSGI?.toLowerCase() === 'uwsgi') {
    Dockerfile.push(
      `CMD uwsgi --master -p 4 --http-socket 0.0.0.0:8000 --uid uwsgi --plugins python3 --protocol uwsgi --wsgi ${pythonModule}:${pythonVariable}`
    );
  } else {
    Dockerfile.push(`CMD python ${pythonModule}`);
  }

  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
28 apps/trpc-experimental/server/src/lib/buildPacks/react.ts (new file)
@@ -0,0 +1,28 @@
import { promises as fs } from 'fs';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
  const Dockerfile: Array<string> = [];

  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
  if (baseImage?.includes('nginx')) {
    Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    await buildCacheImageWithNode(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
40 apps/trpc-experimental/server/src/lib/buildPacks/rust.ts (new file)
@@ -0,0 +1,40 @@
import { promises as fs } from 'fs';
import TOML from '@iarna/toml';
import { buildCacheImageWithCargo, buildImage } from './common';
import { executeCommand } from '../executeCommand';

const createDockerfile = async (data, image, name): Promise<void> => {
  const { workdir, port, applicationId, tag, buildId } = data;
  const Dockerfile: Array<string> = [];
  Dockerfile.push(`FROM ${image}`);
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target target`);
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /usr/local/cargo /usr/local/cargo`);
  Dockerfile.push(`COPY . .`);
  Dockerfile.push(`RUN cargo build --release --bin ${name}`);
  Dockerfile.push('FROM debian:buster-slim');
  Dockerfile.push('WORKDIR /app');
  Dockerfile.push(
    `RUN apt-get update -y && apt-get install -y --no-install-recommends openssl libcurl4 ca-certificates && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*`
  );
  Dockerfile.push(`RUN update-ca-certificates`);
  Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/target/release/${name} ${name}`);
  Dockerfile.push(`EXPOSE ${port}`);
  Dockerfile.push(`CMD ["/app/${name}"]`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { workdir, baseImage, baseBuildImage } = data;
    const { stdout: cargoToml } = await executeCommand({ command: `cat ${workdir}/Cargo.toml` });
    const parsedToml: any = TOML.parse(cargoToml);
    const name = parsedToml.package.name;
    await buildCacheImageWithCargo(data, baseBuildImage);
    await createDockerfile(data, baseImage, name);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
54 apps/trpc-experimental/server/src/lib/buildPacks/static.ts (new file)
@@ -0,0 +1,54 @@
import { promises as fs } from 'fs';
import { generateSecrets } from '../common';
import { buildCacheImageWithNode, buildImage } from './common';

const createDockerfile = async (data, image): Promise<void> => {
  const {
    applicationId,
    tag,
    workdir,
    buildCommand,
    baseDirectory,
    publishDirectory,
    secrets,
    pullmergeRequestId,
    baseImage,
    buildId,
    port
  } = data;
  const Dockerfile: Array<string> = [];

  Dockerfile.push(`FROM ${image}`);
  if (baseImage?.includes('httpd')) {
    Dockerfile.push('WORKDIR /usr/local/apache2/htdocs/');
  } else {
    Dockerfile.push('WORKDIR /app');
  }
  Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
  if (secrets.length > 0) {
    generateSecrets(secrets, pullmergeRequestId, true).forEach((env) => {
      Dockerfile.push(env);
    });
  }
  if (buildCommand) {
    Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
  } else {
    Dockerfile.push(`COPY .${baseDirectory || ''} ./`);
  }
  if (baseImage?.includes('nginx')) {
    Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
  }
  Dockerfile.push(`EXPOSE ${port}`);
  await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
};

export default async function (data) {
  try {
    const { baseImage, baseBuildImage } = data;
    if (data.buildCommand) await buildCacheImageWithNode(data, baseBuildImage);
    await createDockerfile(data, baseImage);
    await buildImage(data);
  } catch (error) {
    throw error;
  }
}
28 apps/trpc-experimental/server/src/lib/buildPacks/svelte.ts (new file)
@@ -0,0 +1,28 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { buildCacheImageWithNode, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
28
apps/trpc-experimental/server/src/lib/buildPacks/vuejs.ts
Normal file
28
apps/trpc-experimental/server/src/lib/buildPacks/vuejs.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { buildCacheImageWithNode, buildImage } from './common';
|
||||
|
||||
const createDockerfile = async (data, image): Promise<void> => {
|
||||
const { applicationId, tag, workdir, publishDirectory, baseImage, buildId, port } = data;
|
||||
const Dockerfile: Array<string> = [];
|
||||
|
||||
Dockerfile.push(`FROM ${image}`);
|
||||
Dockerfile.push('WORKDIR /app');
|
||||
Dockerfile.push(`LABEL coolify.buildId=${buildId}`);
|
||||
Dockerfile.push(`COPY --from=${applicationId}:${tag}-cache /app/${publishDirectory} ./`);
|
||||
if (baseImage?.includes('nginx')) {
|
||||
Dockerfile.push(`COPY /nginx.conf /etc/nginx/nginx.conf`);
|
||||
}
|
||||
Dockerfile.push(`EXPOSE ${port}`);
|
||||
await fs.writeFile(`${workdir}/Dockerfile`, Dockerfile.join('\n'));
|
||||
};
|
||||
|
||||
export default async function (data) {
|
||||
try {
|
||||
const { baseImage, baseBuildImage } = data;
|
||||
await buildCacheImageWithNode(data, baseBuildImage);
|
||||
await createDockerfile(data, baseImage);
|
||||
await buildImage(data);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
939
apps/trpc-experimental/server/src/lib/common.ts
Normal file
939
apps/trpc-experimental/server/src/lib/common.ts
Normal file
@@ -0,0 +1,939 @@
|
||||
import type { Permission, Setting, Team, TeamInvitation, User } from '@prisma/client';
|
||||
import { prisma } from '../prisma';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import crypto from 'crypto';
|
||||
import { promises as dns } from 'dns';
|
||||
import fs from 'fs/promises';
|
||||
import { uniqueNamesGenerator, adjectives, colors, animals } from 'unique-names-generator';
|
||||
import type { Config } from 'unique-names-generator';
|
||||
import { env } from '../env';
|
||||
import { day } from './dayjs';
|
||||
import { executeCommand } from './executeCommand';
|
||||
import { saveBuildLog } from './logging';
|
||||
import { checkContainer } from './docker';
|
||||
import yaml from 'js-yaml';
|
||||
|
||||
const customConfig: Config = {
|
||||
dictionaries: [adjectives, colors, animals],
|
||||
style: 'capital',
|
||||
separator: ' ',
|
||||
length: 3
|
||||
};
|
||||
const algorithm = 'aes-256-ctr';
|
||||
export const isDev = env.NODE_ENV === 'development';
|
||||
export const version = '3.13.0';
|
||||
export const sentryDSN =
|
||||
'https://409f09bcb7af47928d3e0f46b78987f3@o1082494.ingest.sentry.io/4504236622217216';
|
||||
export const defaultTraefikImage = `traefik:v2.8`;
|
||||
export function getAPIUrl() {
|
||||
if (process.env.GITPOD_WORKSPACE_URL) {
|
||||
const { href } = new URL(process.env.GITPOD_WORKSPACE_URL);
|
||||
const newURL = href.replace('https://', 'https://3001-').replace(/\/$/, '');
|
||||
return newURL;
|
||||
}
|
||||
if (process.env.CODESANDBOX_HOST) {
|
||||
return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3001')}`;
|
||||
}
|
||||
return isDev ? 'http://host.docker.internal:3001' : 'http://localhost:3000';
|
||||
}
|
||||
|
||||
export function getUIUrl() {
|
||||
if (process.env.GITPOD_WORKSPACE_URL) {
|
||||
const { href } = new URL(process.env.GITPOD_WORKSPACE_URL);
|
||||
const newURL = href.replace('https://', 'https://3000-').replace(/\/$/, '');
|
||||
return newURL;
|
||||
}
|
||||
if (process.env.CODESANDBOX_HOST) {
|
||||
return `https://${process.env.CODESANDBOX_HOST.replace(/\$PORT/, '3000')}`;
|
||||
}
|
||||
return 'http://localhost:3000';
|
||||
}
|
||||
const mainTraefikEndpoint = isDev
|
||||
? `${getAPIUrl()}/webhooks/traefik/main.json`
|
||||
: 'http://coolify:3000/webhooks/traefik/main.json';
|
||||
|
||||
const otherTraefikEndpoint = isDev
|
||||
? `${getAPIUrl()}/webhooks/traefik/other.json`
|
||||
: 'http://coolify:3000/webhooks/traefik/other.json';
|
||||
|
||||
export async function listSettings(): Promise<Setting | null> {
|
||||
return await prisma.setting.findUnique({ where: { id: '0' } });
|
||||
}
|
||||
export async function getCurrentUser(
|
||||
userId: string
|
||||
): Promise<(User & { permission: Permission[]; teams: Team[] }) | null> {
|
||||
return await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
include: { teams: true, permission: true }
|
||||
});
|
||||
}
|
||||
export async function getTeamInvitation(userId: string): Promise<TeamInvitation[]> {
|
||||
return await prisma.teamInvitation.findMany({ where: { uid: userId } });
|
||||
}
|
||||
|
||||
export async function hashPassword(password: string): Promise<string> {
|
||||
const saltRounds = 15;
|
||||
return bcrypt.hash(password, saltRounds);
|
||||
}
|
||||
export async function comparePassword(password: string, hashedPassword: string): Promise<boolean> {
|
||||
return bcrypt.compare(password, hashedPassword);
|
||||
}
|
||||
export const uniqueName = (): string => uniqueNamesGenerator(customConfig);
|
||||
|
||||
export const decrypt = (hashString: string) => {
|
||||
if (hashString) {
|
||||
try {
|
||||
const hash = JSON.parse(hashString);
|
||||
const decipher = crypto.createDecipheriv(
|
||||
algorithm,
|
||||
env.COOLIFY_SECRET_KEY,
|
||||
Buffer.from(hash.iv, 'hex')
|
||||
);
|
||||
const decrpyted = Buffer.concat([
|
||||
decipher.update(Buffer.from(hash.content, 'hex')),
|
||||
decipher.final()
|
||||
]);
|
||||
return decrpyted.toString();
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
console.log({ decryptionError: error.message });
|
||||
}
|
||||
return hashString;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export function generateRangeArray(start: number, end: number) {
|
||||
return Array.from({ length: end - start }, (_v, k) => k + start);
|
||||
}
|
||||
export function generateTimestamp(): string {
|
||||
return `${day().format('HH:mm:ss.SSS')}`;
|
||||
}
|
||||
export const encrypt = (text: string) => {
|
||||
if (text) {
|
||||
const iv = crypto.randomBytes(16);
|
||||
const cipher = crypto.createCipheriv(algorithm, env.COOLIFY_SECRET_KEY, iv);
|
||||
const encrypted = Buffer.concat([cipher.update(text.trim()), cipher.final()]);
|
||||
return JSON.stringify({
|
||||
iv: iv.toString('hex'),
|
||||
content: encrypted.toString('hex')
|
||||
});
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
export async function getTemplates() {
|
||||
const templatePath = isDev ? './templates.json' : '/app/templates.json';
|
||||
const open = await fs.open(templatePath, 'r');
|
||||
try {
|
||||
let data = await open.readFile({ encoding: 'utf-8' });
|
||||
let jsonData = JSON.parse(data);
|
||||
if (isARM(process.arch)) {
|
||||
jsonData = jsonData.filter((d: { arch: string }) => d.arch !== 'amd64');
|
||||
}
|
||||
return jsonData;
|
||||
} catch (error) {
|
||||
return [];
|
||||
} finally {
|
||||
await open?.close();
|
||||
}
|
||||
}
|
||||
export function isARM(arch: string) {
|
||||
if (arch === 'arm' || arch === 'arm64' || arch === 'aarch' || arch === 'aarch64') {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function removeService({ id }: { id: string }): Promise<void> {
|
||||
await prisma.serviceSecret.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.serviceSetting.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.servicePersistentStorage.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.meiliSearch.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.fider.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.ghost.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.umami.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.hasura.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.plausibleAnalytics.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.minio.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.vscodeserver.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.wordpress.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.glitchTip.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.moodle.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.appwrite.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.searxng.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.weblate.deleteMany({ where: { serviceId: id } });
|
||||
await prisma.taiga.deleteMany({ where: { serviceId: id } });
|
||||
|
||||
await prisma.service.delete({ where: { id } });
|
||||
}
|
||||
|
||||
export const createDirectories = async ({
|
||||
repository,
|
||||
buildId
|
||||
}: {
|
||||
repository: string;
|
||||
buildId: string;
|
||||
}): Promise<{ workdir: string; repodir: string }> => {
|
||||
if (repository) repository = repository.replaceAll(' ', '');
|
||||
const repodir = `/tmp/build-sources/${repository}/`;
|
||||
const workdir = `/tmp/build-sources/${repository}/${buildId}`;
|
||||
let workdirFound = false;
|
||||
try {
|
||||
workdirFound = !!(await fs.stat(workdir));
|
||||
} catch (error) {}
|
||||
if (workdirFound) {
|
||||
await executeCommand({ command: `rm -fr ${workdir}` });
|
||||
}
|
||||
await executeCommand({ command: `mkdir -p ${workdir}` });
|
||||
return {
|
||||
workdir,
|
||||
repodir
|
||||
};
|
||||
};
|
||||
|
||||
export async function saveDockerRegistryCredentials({ url, username, password, workdir }) {
|
||||
if (!username || !password) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let decryptedPassword = decrypt(password);
|
||||
const location = `${workdir}/.docker`;
|
||||
|
||||
try {
|
||||
await fs.mkdir(`${workdir}/.docker`);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
const payload = JSON.stringify({
|
||||
auths: {
|
||||
[url]: {
|
||||
auth: Buffer.from(`${username}:${decryptedPassword}`).toString('base64')
|
||||
}
|
||||
}
|
||||
});
|
||||
await fs.writeFile(`${location}/config.json`, payload);
|
||||
return location;
|
||||
}
|
||||
export function getDomain(domain: string): string {
|
||||
if (domain) {
|
||||
return domain?.replace('https://', '').replace('http://', '');
|
||||
} else {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
export async function isDomainConfigured({
|
||||
id,
|
||||
fqdn,
|
||||
checkOwn = false,
|
||||
remoteIpAddress = undefined
|
||||
}: {
|
||||
id: string;
|
||||
fqdn: string;
|
||||
checkOwn?: boolean;
|
||||
remoteIpAddress?: string;
|
||||
}): Promise<boolean> {
|
||||
const domain = getDomain(fqdn);
|
||||
const nakedDomain = domain.replace('www.', '');
|
||||
const foundApp = await prisma.application.findFirst({
|
||||
where: {
|
||||
OR: [
|
||||
{ fqdn: { endsWith: `//${nakedDomain}` } },
|
||||
{ fqdn: { endsWith: `//www.${nakedDomain}` } },
|
||||
{ dockerComposeConfiguration: { contains: `//${nakedDomain}` } },
|
||||
{ dockerComposeConfiguration: { contains: `//www.${nakedDomain}` } }
|
||||
],
|
||||
id: { not: id },
|
||||
destinationDocker: {
|
||||
remoteIpAddress
|
||||
}
|
||||
},
|
||||
select: { fqdn: true }
|
||||
});
|
||||
const foundService = await prisma.service.findFirst({
|
||||
where: {
|
||||
OR: [
|
||||
{ fqdn: { endsWith: `//${nakedDomain}` } },
|
||||
{ fqdn: { endsWith: `//www.${nakedDomain}` } }
|
||||
],
|
||||
id: { not: checkOwn ? undefined : id },
|
||||
destinationDocker: {
|
||||
remoteIpAddress
|
||||
}
|
||||
},
|
||||
select: { fqdn: true }
|
||||
});
|
||||
|
||||
const coolifyFqdn = await prisma.setting.findFirst({
|
||||
where: {
|
||||
OR: [
|
||||
{ fqdn: { endsWith: `//${nakedDomain}` } },
|
||||
{ fqdn: { endsWith: `//www.${nakedDomain}` } }
|
||||
],
|
||||
id: { not: id }
|
||||
},
|
||||
select: { fqdn: true }
|
||||
});
|
||||
return !!(foundApp || foundService || coolifyFqdn);
|
||||
}
|
||||
|
||||
export async function checkExposedPort({
|
||||
id,
|
||||
configuredPort,
|
||||
exposePort,
|
||||
engine,
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
}: {
|
||||
id: string;
|
||||
configuredPort?: number;
|
||||
exposePort: number;
|
||||
engine: string;
|
||||
remoteEngine: boolean;
|
||||
remoteIpAddress?: string;
|
||||
}) {
|
||||
if (exposePort < 1024 || exposePort > 65535) {
|
||||
throw { status: 500, message: `Exposed Port needs to be between 1024 and 65535.` };
|
||||
}
|
||||
if (configuredPort) {
|
||||
if (configuredPort !== exposePort) {
|
||||
const availablePort = await getFreeExposedPort(
|
||||
id,
|
||||
exposePort,
|
||||
engine,
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
);
|
||||
if (availablePort.toString() !== exposePort.toString()) {
|
||||
throw { status: 500, message: `Port ${exposePort} is already in use.` };
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const availablePort = await getFreeExposedPort(
|
||||
id,
|
||||
exposePort,
|
||||
engine,
|
||||
remoteEngine,
|
||||
remoteIpAddress
|
||||
);
|
||||
if (availablePort.toString() !== exposePort.toString()) {
|
||||
throw { status: 500, message: `Port ${exposePort} is already in use.` };
|
||||
}
|
||||
}
|
||||
}
|
||||
export async function getFreeExposedPort(id, exposePort, engine, remoteEngine, remoteIpAddress) {
|
||||
const { default: checkPort } = await import('is-port-reachable');
|
||||
if (remoteEngine) {
|
||||
const applicationUsed = await (
|
||||
await prisma.application.findMany({
|
||||
where: {
|
||||
exposePort: { not: null },
|
||||
id: { not: id },
|
||||
destinationDocker: { remoteIpAddress }
|
||||
},
|
||||
select: { exposePort: true }
|
||||
})
|
||||
).map((a) => a.exposePort);
|
||||
const serviceUsed = await (
|
||||
await prisma.service.findMany({
|
||||
where: {
|
||||
exposePort: { not: null },
|
||||
id: { not: id },
|
||||
destinationDocker: { remoteIpAddress }
|
||||
},
|
||||
select: { exposePort: true }
|
||||
})
|
||||
).map((a) => a.exposePort);
|
||||
const usedPorts = [...applicationUsed, ...serviceUsed];
|
||||
if (usedPorts.includes(exposePort)) {
|
||||
return false;
|
||||
}
|
||||
const found = await checkPort(exposePort, { host: remoteIpAddress });
|
||||
if (!found) {
|
||||
return exposePort;
|
||||
}
|
||||
return false;
|
||||
} else {
|
||||
const applicationUsed = await (
|
||||
await prisma.application.findMany({
|
||||
where: { exposePort: { not: null }, id: { not: id }, destinationDocker: { engine } },
|
||||
select: { exposePort: true }
|
||||
})
|
||||
).map((a) => a.exposePort);
|
||||
const serviceUsed = await (
|
||||
await prisma.service.findMany({
|
||||
where: { exposePort: { not: null }, id: { not: id }, destinationDocker: { engine } },
|
||||
select: { exposePort: true }
|
||||
})
|
||||
).map((a) => a.exposePort);
|
||||
const usedPorts = [...applicationUsed, ...serviceUsed];
|
||||
if (usedPorts.includes(exposePort)) {
|
||||
return false;
|
||||
}
|
||||
const found = await checkPort(exposePort, { host: 'localhost' });
|
||||
if (!found) {
|
||||
return exposePort;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function checkDomainsIsValidInDNS({ hostname, fqdn, dualCerts }): Promise<any> {
|
||||
const { isIP } = await import('is-ip');
|
||||
const domain = getDomain(fqdn);
|
||||
const domainDualCert = domain.includes('www.') ? domain.replace('www.', '') : `www.${domain}`;
|
||||
|
||||
const { DNSServers } = await listSettings();
|
||||
if (DNSServers) {
|
||||
dns.setServers([...DNSServers.split(',')]);
|
||||
}
|
||||
|
||||
let resolves = [];
|
||||
try {
|
||||
if (isIP(hostname)) {
|
||||
resolves = [hostname];
|
||||
} else {
|
||||
resolves = await dns.resolve4(hostname);
|
||||
}
|
||||
} catch (error) {
|
||||
throw { status: 500, message: `Could not determine IP address for ${hostname}.` };
|
||||
}
|
||||
|
||||
if (dualCerts) {
|
||||
try {
|
||||
const ipDomain = await dns.resolve4(domain);
|
||||
const ipDomainDualCert = await dns.resolve4(domainDualCert);
|
||||
|
||||
let ipDomainFound = false;
|
||||
let ipDomainDualCertFound = false;
|
||||
|
||||
for (const ip of ipDomain) {
|
||||
if (resolves.includes(ip)) {
|
||||
ipDomainFound = true;
|
||||
}
|
||||
}
|
||||
for (const ip of ipDomainDualCert) {
|
||||
if (resolves.includes(ip)) {
|
||||
ipDomainDualCertFound = true;
|
||||
}
|
||||
}
|
||||
if (ipDomainFound && ipDomainDualCertFound) return { status: 200 };
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
} catch (error) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
const ipDomain = await dns.resolve4(domain);
|
||||
let ipDomainFound = false;
|
||||
for (const ip of ipDomain) {
|
||||
if (resolves.includes(ip)) {
|
||||
ipDomainFound = true;
|
||||
}
|
||||
}
|
||||
if (ipDomainFound) return { status: 200 };
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
} catch (error) {
|
||||
throw {
|
||||
status: 500,
|
||||
message: `DNS not set correctly or propogated.<br>Please check your DNS settings.`
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
export const setDefaultConfiguration = async (data: any) => {
|
||||
let {
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile
|
||||
} = data;
|
||||
//@ts-ignore
|
||||
const template = scanningTemplates[buildPack];
|
||||
if (!port) {
|
||||
port = template?.port || 3000;
|
||||
|
||||
if (buildPack === 'static') port = 80;
|
||||
else if (buildPack === 'node') port = 3000;
|
||||
else if (buildPack === 'php') port = 80;
|
||||
else if (buildPack === 'python') port = 8000;
|
||||
}
|
||||
if (!installCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||
installCommand = template?.installCommand || 'yarn install';
|
||||
if (!startCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||
startCommand = template?.startCommand || 'yarn start';
|
||||
if (!buildCommand && buildPack !== 'static' && buildPack !== 'laravel')
|
||||
buildCommand = template?.buildCommand || null;
|
||||
if (!publishDirectory) publishDirectory = template?.publishDirectory || null;
|
||||
if (baseDirectory) {
|
||||
if (!baseDirectory.startsWith('/')) baseDirectory = `/${baseDirectory}`;
|
||||
if (baseDirectory.endsWith('/') && baseDirectory !== '/')
|
||||
baseDirectory = baseDirectory.slice(0, -1);
|
||||
}
|
||||
if (dockerFileLocation) {
|
||||
if (!dockerFileLocation.startsWith('/')) dockerFileLocation = `/${dockerFileLocation}`;
|
||||
if (dockerFileLocation.endsWith('/')) dockerFileLocation = dockerFileLocation.slice(0, -1);
|
||||
} else {
|
||||
dockerFileLocation = '/Dockerfile';
|
||||
}
|
||||
if (dockerComposeFileLocation) {
|
||||
if (!dockerComposeFileLocation.startsWith('/'))
|
||||
dockerComposeFileLocation = `/${dockerComposeFileLocation}`;
|
||||
if (dockerComposeFileLocation.endsWith('/'))
|
||||
dockerComposeFileLocation = dockerComposeFileLocation.slice(0, -1);
|
||||
} else {
|
||||
dockerComposeFileLocation = '/Dockerfile';
|
||||
}
|
||||
if (!denoMainFile) {
|
||||
denoMainFile = 'main.ts';
|
||||
}
|
||||
|
||||
return {
|
||||
buildPack,
|
||||
port,
|
||||
installCommand,
|
||||
startCommand,
|
||||
buildCommand,
|
||||
publishDirectory,
|
||||
baseDirectory,
|
||||
dockerFileLocation,
|
||||
dockerComposeFileLocation,
|
||||
denoMainFile
|
||||
};
|
||||
};
|
||||
|
||||
export const scanningTemplates = {
|
||||
'@sveltejs/kit': {
|
||||
buildPack: 'nodejs'
|
||||
},
|
||||
astro: {
|
||||
buildPack: 'astro'
|
||||
},
|
||||
'@11ty/eleventy': {
|
||||
buildPack: 'eleventy'
|
||||
},
|
||||
svelte: {
|
||||
buildPack: 'svelte'
|
||||
},
|
||||
'@nestjs/core': {
|
||||
buildPack: 'nestjs'
|
||||
},
|
||||
next: {
|
||||
buildPack: 'nextjs'
|
||||
},
|
||||
nuxt: {
|
||||
buildPack: 'nuxtjs'
|
||||
},
|
||||
'react-scripts': {
|
||||
buildPack: 'react'
|
||||
},
|
||||
'parcel-bundler': {
|
||||
buildPack: 'static'
|
||||
},
|
||||
'@vue/cli-service': {
|
||||
buildPack: 'vuejs'
|
||||
},
|
||||
vuejs: {
|
||||
buildPack: 'vuejs'
|
||||
},
|
||||
gatsby: {
|
||||
buildPack: 'gatsby'
|
||||
},
|
||||
'preact-cli': {
|
||||
buildPack: 'react'
|
||||
}
|
||||
};
|
||||
|
||||
export async function cleanupDB(buildId: string, applicationId: string) {
|
||||
const data = await prisma.build.findUnique({ where: { id: buildId } });
|
||||
if (data?.status === 'queued' || data?.status === 'running') {
|
||||
await prisma.build.update({ where: { id: buildId }, data: { status: 'canceled' } });
|
||||
}
|
||||
await saveBuildLog({ line: 'Canceled.', buildId, applicationId });
|
||||
}
|
||||
|
||||
export const base64Encode = (text: string): string => {
|
||||
return Buffer.from(text).toString('base64');
|
||||
};
|
||||
export const base64Decode = (text: string): string => {
|
||||
return Buffer.from(text, 'base64').toString('ascii');
|
||||
};
|
||||
function parseSecret(secret, isBuild) {
|
||||
if (secret.value.includes('$')) {
|
||||
secret.value = secret.value.replaceAll('$', '$$$$');
|
||||
}
|
||||
if (secret.value.includes('\\n')) {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}=${secret.value}`;
|
||||
} else {
|
||||
return `${secret.name}=${secret.value}`;
|
||||
}
|
||||
} else if (secret.value.includes(' ')) {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}='${secret.value}'`;
|
||||
} else {
|
||||
return `${secret.name}='${secret.value}'`;
|
||||
}
|
||||
} else {
|
||||
if (isBuild) {
|
||||
return `ARG ${secret.name}=${secret.value}`;
|
||||
} else {
|
||||
return `${secret.name}=${secret.value}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
export function generateSecrets(
|
||||
secrets: Array<any>,
|
||||
pullmergeRequestId: string,
|
||||
isBuild = false,
|
||||
port = null,
|
||||
compose = false
|
||||
): Array<string> {
|
||||
const envs = [];
|
||||
const isPRMRSecret = secrets.filter((s) => s.isPRMRSecret);
|
||||
const normalSecrets = secrets.filter((s) => !s.isPRMRSecret);
|
||||
if (pullmergeRequestId && isPRMRSecret.length > 0) {
|
||||
isPRMRSecret.forEach((secret) => {
|
||||
if ((isBuild && !secret.isBuildSecret) || (!isBuild && secret.isBuildSecret)) {
|
||||
return;
|
||||
}
|
||||
const build = isBuild && secret.isBuildSecret;
|
||||
envs.push(parseSecret(secret, compose ? false : build));
|
||||
});
|
||||
}
|
||||
if (!pullmergeRequestId && normalSecrets.length > 0) {
|
||||
normalSecrets.forEach((secret) => {
|
||||
if ((isBuild && !secret.isBuildSecret) || (!isBuild && secret.isBuildSecret)) {
|
||||
return;
|
||||
}
|
||||
const build = isBuild && secret.isBuildSecret;
|
||||
envs.push(parseSecret(secret, compose ? false : build));
|
||||
});
|
||||
}
|
||||
const portFound = envs.filter((env) => env.startsWith('PORT'));
|
||||
if (portFound.length === 0 && port && !isBuild) {
|
||||
envs.push(`PORT=${port}`);
|
||||
}
|
||||
const nodeEnv = envs.filter((env) => env.startsWith('NODE_ENV'));
|
||||
if (nodeEnv.length === 0 && !isBuild) {
|
||||
envs.push(`NODE_ENV=production`);
|
||||
}
|
||||
return envs;
|
||||
}
|
||||
export function decryptApplication(application: any) {
|
||||
if (application) {
|
||||
if (application?.gitSource?.githubApp?.clientSecret) {
|
||||
application.gitSource.githubApp.clientSecret =
|
||||
decrypt(application.gitSource.githubApp.clientSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.webhookSecret) {
|
||||
application.gitSource.githubApp.webhookSecret =
|
||||
decrypt(application.gitSource.githubApp.webhookSecret) || null;
|
||||
}
|
||||
if (application?.gitSource?.githubApp?.privateKey) {
|
||||
application.gitSource.githubApp.privateKey =
|
||||
decrypt(application.gitSource.githubApp.privateKey) || null;
|
||||
}
|
||||
if (application?.gitSource?.gitlabApp?.appSecret) {
|
||||
application.gitSource.gitlabApp.appSecret =
|
||||
decrypt(application.gitSource.gitlabApp.appSecret) || null;
|
||||
}
|
||||
if (application?.secrets.length > 0) {
|
||||
application.secrets = application.secrets.map((s: any) => {
|
||||
s.value = decrypt(s.value) || null;
|
||||
return s;
|
||||
});
|
||||
}
|
||||
|
||||
return application;
|
||||
}
|
||||
}
|
||||
export async function pushToRegistry(
|
||||
application: any,
|
||||
workdir: string,
|
||||
tag: string,
|
||||
imageName: string,
|
||||
customTag: string
|
||||
) {
|
||||
const location = `${workdir}/.docker`;
|
||||
const tagCommand = `docker tag ${application.id}:${tag} ${imageName}:${customTag}`;
|
||||
const pushCommand = `docker --config ${location} push ${imageName}:${customTag}`;
|
||||
await executeCommand({
|
||||
dockerId: application.destinationDockerId,
|
||||
command: tagCommand
|
||||
});
|
||||
await executeCommand({
|
||||
dockerId: application.destinationDockerId,
|
||||
command: pushCommand
|
||||
});
|
||||
}
|
||||
|
||||
export async function getContainerUsage(dockerId: string, container: string): Promise<any> {
|
||||
try {
|
||||
const { stdout } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker container stats ${container} --no-stream --no-trunc --format "{{json .}}"`
|
||||
});
|
||||
return JSON.parse(stdout);
|
||||
} catch (err) {
|
||||
return {
|
||||
MemUsage: 0,
|
||||
CPUPerc: 0,
|
||||
NetIO: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
export function fixType(type) {
|
||||
return type?.replaceAll(' ', '').toLowerCase() || null;
|
||||
}
|
||||
const compareSemanticVersions = (a: string, b: string) => {
|
||||
const a1 = a.split('.');
|
||||
const b1 = b.split('.');
|
||||
const len = Math.min(a1.length, b1.length);
|
||||
for (let i = 0; i < len; i++) {
|
||||
const a2 = +a1[i] || 0;
|
||||
const b2 = +b1[i] || 0;
|
||||
if (a2 !== b2) {
|
||||
return a2 > b2 ? 1 : -1;
|
||||
}
|
||||
}
|
||||
return b1.length - a1.length;
|
||||
};
|
||||
export async function getTags(type: string) {
|
||||
try {
|
||||
if (type) {
|
||||
const tagsPath = isDev ? './tags.json' : '/app/tags.json';
|
||||
const data = await fs.readFile(tagsPath, 'utf8');
|
||||
let tags = JSON.parse(data);
|
||||
if (tags) {
|
||||
tags = tags.find((tag: any) => tag.name.includes(type));
|
||||
tags.tags = tags.tags.sort(compareSemanticVersions).reverse();
|
||||
return tags;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
export function makeLabelForServices(type) {
|
||||
return [
|
||||
'coolify.managed=true',
|
||||
`coolify.version=${version}`,
|
||||
`coolify.type=service`,
|
||||
`coolify.service.type=${type}`
|
||||
];
|
||||
}
|
||||
export const asyncSleep = (delay: number): Promise<unknown> =>
|
||||
new Promise((resolve) => setTimeout(resolve, delay));
|
||||
|
||||
export async function startTraefikTCPProxy(
|
||||
destinationDocker: any,
|
||||
id: string,
|
||||
publicPort: number,
|
||||
privatePort: number,
|
||||
type?: string
|
||||
): Promise<{ stdout: string; stderr: string }> {
|
||||
const { network, id: dockerId, remoteEngine } = destinationDocker;
|
||||
const container = `${id}-${publicPort}`;
|
||||
const { found } = await checkContainer({ dockerId, container, remove: true });
|
||||
const { ipv4, ipv6 } = await listSettings();
|
||||
|
||||
let dependentId = id;
|
||||
if (type === 'wordpressftp') dependentId = `${id}-ftp`;
|
||||
const { found: foundDependentContainer } = await checkContainer({
|
||||
dockerId,
|
||||
container: dependentId,
|
||||
remove: true
|
||||
});
|
||||
if (foundDependentContainer && !found) {
|
||||
const { stdout: Config } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||
});
|
||||
|
||||
const ip = JSON.parse(Config)[0].Gateway;
|
||||
let traefikUrl = otherTraefikEndpoint;
|
||||
if (remoteEngine) {
|
||||
let ip = null;
|
||||
if (isDev) {
|
||||
ip = getAPIUrl();
|
||||
} else {
|
||||
ip = `http://${ipv4 || ipv6}:3000`;
|
||||
}
|
||||
traefikUrl = `${ip}/webhooks/traefik/other.json`;
|
||||
}
|
||||
const tcpProxy = {
|
||||
version: '3.8',
|
||||
services: {
|
||||
[`${id}-${publicPort}`]: {
|
||||
container_name: container,
|
||||
image: defaultTraefikImage,
|
||||
command: [
|
||||
`--entrypoints.tcp.address=:${publicPort}`,
|
||||
`--entryPoints.tcp.forwardedHeaders.insecure=true`,
|
||||
`--providers.http.endpoint=${traefikUrl}?id=${id}&privatePort=${privatePort}&publicPort=${publicPort}&type=tcp&address=${dependentId}`,
|
||||
'--providers.http.pollTimeout=10s',
|
||||
'--log.level=error'
|
||||
],
|
||||
ports: [`${publicPort}:${publicPort}`],
|
||||
extra_hosts: ['host.docker.internal:host-gateway', `host.docker.internal: ${ip}`],
|
||||
volumes: ['/var/run/docker.sock:/var/run/docker.sock'],
|
||||
networks: ['coolify-infra', network]
|
||||
}
|
||||
},
|
||||
networks: {
|
||||
[network]: {
|
||||
external: false,
|
||||
name: network
|
||||
},
|
||||
'coolify-infra': {
|
||||
external: false,
|
||||
name: 'coolify-infra'
|
||||
}
|
||||
}
|
||||
};
|
||||
await fs.writeFile(`/tmp/docker-compose-${id}.yaml`, yaml.dump(tcpProxy));
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker compose -f /tmp/docker-compose-${id}.yaml up -d`
|
||||
});
|
||||
await fs.rm(`/tmp/docker-compose-${id}.yaml`);
|
||||
}
|
||||
if (!foundDependentContainer && found) {
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
}
|
||||
export async function startTraefikProxy(id: string): Promise<void> {
|
||||
const { engine, network, remoteEngine, remoteIpAddress } =
|
||||
await prisma.destinationDocker.findUnique({ where: { id } });
|
||||
const { found } = await checkContainer({
|
||||
dockerId: id,
|
||||
container: 'coolify-proxy',
|
||||
remove: true
|
||||
});
|
||||
const { id: settingsId, ipv4, ipv6 } = await listSettings();
|
||||
|
||||
if (!found) {
|
||||
const { stdout: coolifyNetwork } = await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker network ls --filter 'name=coolify-infra' --no-trunc --format "{{json .}}"`
|
||||
});
|
||||
|
||||
if (!coolifyNetwork) {
|
||||
await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker network create --attachable coolify-infra`
|
||||
});
|
||||
}
|
||||
const { stdout: Config } = await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker network inspect ${network} --format '{{json .IPAM.Config }}'`
|
||||
});
|
||||
const ip = JSON.parse(Config)[0].Gateway;
|
||||
let traefikUrl = mainTraefikEndpoint;
|
||||
if (remoteEngine) {
|
||||
let ip = null;
|
||||
if (isDev) {
|
||||
ip = getAPIUrl();
|
||||
} else {
|
||||
ip = `http://${ipv4 || ipv6}:3000`;
|
||||
}
|
||||
traefikUrl = `${ip}/webhooks/traefik/remote/${id}`;
|
||||
}
|
||||
await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker run --restart always \
|
||||
--add-host 'host.docker.internal:host-gateway' \
|
||||
${ip ? `--add-host 'host.docker.internal:${ip}'` : ''} \
|
||||
-v coolify-traefik-letsencrypt:/etc/traefik/acme \
|
||||
-v /var/run/docker.sock:/var/run/docker.sock \
|
||||
--network coolify-infra \
|
||||
-p "80:80" \
|
||||
-p "443:443" \
|
||||
--name coolify-proxy \
|
||||
-d ${defaultTraefikImage} \
|
||||
--entrypoints.web.address=:80 \
|
||||
--entrypoints.web.forwardedHeaders.insecure=true \
|
||||
--entrypoints.websecure.address=:443 \
|
||||
--entrypoints.websecure.forwardedHeaders.insecure=true \
|
||||
--providers.docker=true \
|
||||
--providers.docker.exposedbydefault=false \
|
||||
--providers.http.endpoint=${traefikUrl} \
|
||||
--providers.http.pollTimeout=5s \
|
||||
--certificatesresolvers.letsencrypt.acme.httpchallenge=true \
|
||||
--certificatesresolvers.letsencrypt.acme.storage=/etc/traefik/acme/acme.json \
|
||||
--certificatesresolvers.letsencrypt.acme.httpchallenge.entrypoint=web \
|
||||
--log.level=error`
|
||||
});
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: true }
|
||||
});
|
||||
}
|
||||
// Configure networks for local docker engine
|
||||
if (engine) {
|
||||
const destinations = await prisma.destinationDocker.findMany({ where: { engine } });
|
||||
for (const destination of destinations) {
|
||||
await configureNetworkTraefikProxy(destination);
|
||||
}
|
||||
}
|
||||
// Configure networks for remote docker engine
|
||||
if (remoteEngine) {
|
||||
const destinations = await prisma.destinationDocker.findMany({ where: { remoteIpAddress } });
|
||||
for (const destination of destinations) {
|
||||
await configureNetworkTraefikProxy(destination);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function configureNetworkTraefikProxy(destination: any): Promise<void> {
|
||||
const { id } = destination;
|
||||
const { stdout: networks } = await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker ps -a --filter name=coolify-proxy --format '{{json .Networks}}'`
|
||||
});
|
||||
const configuredNetworks = networks.replace(/"/g, '').replace('\n', '').split(',');
|
||||
if (!configuredNetworks.includes(destination.network)) {
|
||||
await executeCommand({
|
||||
dockerId: destination.id,
|
||||
command: `docker network connect ${destination.network} coolify-proxy`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function stopTraefikProxy(id: string): Promise<{ stdout: string; stderr: string }> {
|
||||
const { found } = await checkContainer({ dockerId: id, container: 'coolify-proxy' });
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { isCoolifyProxyUsed: false }
|
||||
});
|
||||
if (found) {
|
||||
return await executeCommand({
|
||||
dockerId: id,
|
||||
command: `docker stop -t 0 coolify-proxy && docker rm coolify-proxy`,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
return { stdout: '', stderr: '' };
|
||||
}
|
||||
7
apps/trpc-experimental/server/src/lib/dayjs.ts
Normal file
7
apps/trpc-experimental/server/src/lib/dayjs.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import dayjs from 'dayjs';
|
||||
import utc from 'dayjs/plugin/utc.js';
|
||||
import relativeTime from 'dayjs/plugin/relativeTime.js';
|
||||
dayjs.extend(utc);
|
||||
dayjs.extend(relativeTime);
|
||||
|
||||
export { dayjs as day };
|
||||
157
apps/trpc-experimental/server/src/lib/docker.ts
Normal file
157
apps/trpc-experimental/server/src/lib/docker.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import { executeCommand } from './executeCommand';
|
||||
|
||||
export async function checkContainer({
|
||||
dockerId,
|
||||
container,
|
||||
remove = false
|
||||
}: {
|
||||
dockerId: string;
|
||||
container: string;
|
||||
remove?: boolean;
|
||||
}): Promise<{
|
||||
found: boolean;
|
||||
status?: { isExited: boolean; isRunning: boolean; isRestarting: boolean };
|
||||
}> {
|
||||
let containerFound = false;
|
||||
try {
|
||||
const { stdout } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker inspect --format '{{json .State}}' ${container}`
|
||||
});
|
||||
containerFound = true;
|
||||
const parsedStdout = JSON.parse(stdout);
|
||||
const status = parsedStdout.Status;
|
||||
const isRunning = status === 'running';
|
||||
const isRestarting = status === 'restarting';
|
||||
const isExited = status === 'exited';
|
||||
if (status === 'created') {
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker rm ${container}`
|
||||
});
|
||||
}
|
||||
if (remove && status === 'exited') {
|
||||
await executeCommand({
|
||||
dockerId,
|
||||
command: `docker rm ${container}`
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
found: containerFound,
|
||||
status: {
|
||||
isRunning,
|
||||
isRestarting,
|
||||
isExited
|
||||
}
|
||||
};
|
||||
} catch (err) {
|
||||
// Container not found
|
||||
}
|
||||
return {
|
||||
found: false
|
||||
};
|
||||
}
|
||||
|
||||
export async function removeContainer({
|
||||
id,
|
||||
dockerId
|
||||
}: {
|
||||
id: string;
|
||||
dockerId: string;
|
||||
}): Promise<void> {
|
||||
try {
|
||||
const { stdout } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker inspect --format '{{json .State}}' ${id}`
|
||||
});
|
||||
if (JSON.parse(stdout).Running) {
|
||||
await executeCommand({ dockerId, command: `docker stop -t 0 ${id}` });
|
||||
await executeCommand({ dockerId, command: `docker rm ${id}` });
|
||||
}
|
||||
if (JSON.parse(stdout).Status === 'exited') {
|
||||
await executeCommand({ dockerId, command: `docker rm ${id}` });
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function stopDatabaseContainer(database: any): Promise<boolean> {
|
||||
let everStarted = false;
|
||||
const {
|
||||
id,
|
||||
destinationDockerId,
|
||||
destinationDocker: { engine, id: dockerId }
|
||||
} = database;
|
||||
if (destinationDockerId) {
|
||||
try {
|
||||
const { stdout } = await executeCommand({
|
||||
dockerId,
|
||||
command: `docker inspect --format '{{json .State}}' ${id}`
|
||||
});
|
||||
|
||||
if (stdout) {
|
||||
everStarted = true;
|
||||
await removeContainer({ id, dockerId });
|
||||
}
|
||||
} catch (error) {
|
||||
//
|
||||
}
|
||||
}
|
||||
return everStarted;
|
||||
}
|
||||
export async function stopTcpHttpProxy(
|
||||
id: string,
|
||||
destinationDocker: any,
|
||||
publicPort: number,
|
||||
forceName: string | null = null
|
||||
): Promise<{ stdout: string; stderr: string } | Error | unknown> {
|
||||
const { id: dockerId } = destinationDocker;
|
||||
let container = `${id}-${publicPort}`;
|
||||
if (forceName) container = forceName;
|
||||
const { found } = await checkContainer({ dockerId, container });
|
||||
try {
|
||||
if (!found) return true;
|
||||
return await executeCommand({
|
||||
dockerId,
|
||||
command: `docker stop -t 0 ${container} && docker rm ${container}`,
|
||||
shell: true
|
||||
});
|
||||
} catch (error) {
|
||||
return error;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatLabelsOnDocker(data: any) {
|
||||
return data
|
||||
.trim()
|
||||
.split('\n')
|
||||
.map((a) => JSON.parse(a))
|
||||
.map((container) => {
|
||||
const labels = container.Labels.split(',');
|
||||
let jsonLabels = {};
|
||||
labels.forEach((l) => {
|
||||
const name = l.split('=')[0];
|
||||
const value = l.split('=')[1];
|
||||
jsonLabels = { ...jsonLabels, ...{ [name]: value } };
|
||||
});
|
||||
container.Labels = jsonLabels;
|
||||
return container;
|
||||
});
|
||||
}
|
||||
|
||||
export function defaultComposeConfiguration(network: string): any {
|
||||
return {
|
||||
networks: [network],
|
||||
restart: 'on-failure',
|
||||
deploy: {
|
||||
restart_policy: {
|
||||
condition: 'on-failure',
|
||||
delay: '5s',
|
||||
max_attempts: 10,
|
||||
window: '120s'
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
196
apps/trpc-experimental/server/src/lib/executeCommand.ts
Normal file
196
apps/trpc-experimental/server/src/lib/executeCommand.ts
Normal file
@@ -0,0 +1,196 @@
|
||||
import { prisma } from '../prisma';
|
||||
import os from 'os';
|
||||
import fs from 'fs/promises';
|
||||
import type { ExecaChildProcess } from 'execa';
|
||||
import sshConfig from 'ssh-config';
|
||||
|
||||
import { getFreeSSHLocalPort } from './ssh';
|
||||
import { env } from '../env';
|
||||
import { BuildLog, saveBuildLog } from './logging';
|
||||
import { decrypt } from './common';
|
||||
|
||||
export async function executeCommand({
|
||||
command,
|
||||
dockerId = null,
|
||||
sshCommand = false,
|
||||
shell = false,
|
||||
stream = false,
|
||||
buildId,
|
||||
applicationId,
|
||||
debug
|
||||
}: {
|
||||
command: string;
|
||||
sshCommand?: boolean;
|
||||
shell?: boolean;
|
||||
stream?: boolean;
|
||||
dockerId?: string | null;
|
||||
buildId?: string;
|
||||
applicationId?: string;
|
||||
debug?: boolean;
|
||||
}): Promise<ExecaChildProcess<string>> {
|
||||
const { execa, execaCommand } = await import('execa');
|
||||
const { parse } = await import('shell-quote');
|
||||
const parsedCommand = parse(command);
|
||||
const dockerCommand = parsedCommand[0];
|
||||
const dockerArgs = parsedCommand.slice(1);
|
||||
|
||||
if (dockerId && dockerCommand && dockerArgs) {
|
||||
const destinationDocker = await prisma.destinationDocker.findUnique({
|
||||
where: { id: dockerId }
|
||||
});
|
||||
if (!destinationDocker) {
|
||||
throw new Error('Destination docker not found');
|
||||
}
|
||||
let { remoteEngine, remoteIpAddress, engine } = destinationDocker;
|
||||
if (remoteEngine) {
|
||||
await createRemoteEngineConfiguration(dockerId);
|
||||
engine = `ssh://${remoteIpAddress}-remote`;
|
||||
} else {
|
||||
engine = 'unix:///var/run/docker.sock';
|
||||
}
|
||||
|
||||
if (env.CODESANDBOX_HOST) {
|
||||
if (command.startsWith('docker compose')) {
|
||||
command = command.replace(/docker compose/gi, 'docker-compose');
|
||||
}
|
||||
}
|
||||
if (sshCommand) {
|
||||
if (shell) {
|
||||
return execaCommand(`ssh ${remoteIpAddress}-remote ${command}`);
|
||||
}
|
||||
//@ts-ignore
|
||||
return await execa('ssh', [`${remoteIpAddress}-remote`, dockerCommand, ...dockerArgs]);
|
||||
}
|
||||
if (stream) {
|
||||
return await new Promise(async (resolve, reject) => {
|
||||
let subprocess = null;
|
||||
if (shell) {
|
||||
//@ts-ignore
|
||||
subprocess = execaCommand(command, {
|
||||
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||
});
|
||||
} else {
|
||||
//@ts-ignore
|
||||
subprocess = execa(dockerCommand, dockerArgs, {
|
||||
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||
});
|
||||
}
|
||||
const logs: any[] = [];
|
||||
if (subprocess && subprocess.stdout && subprocess.stderr) {
|
||||
subprocess.stdout.on('data', async (data: string) => {
|
||||
const stdout = data.toString();
|
||||
const array = stdout.split('\n');
|
||||
for (const line of array) {
|
||||
if (line !== '\n' && line !== '') {
|
||||
const log: BuildLog = {
|
||||
line: `${line.replace('\n', '')}`,
|
||||
buildId,
|
||||
applicationId
|
||||
};
|
||||
logs.push(log);
|
||||
if (debug) {
|
||||
await saveBuildLog(log);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
subprocess.stderr.on('data', async (data: string) => {
|
||||
const stderr = data.toString();
|
||||
const array = stderr.split('\n');
|
||||
for (const line of array) {
|
||||
if (line !== '\n' && line !== '') {
|
||||
const log = {
|
||||
line: `${line.replace('\n', '')}`,
|
||||
buildId,
|
||||
applicationId
|
||||
};
|
||||
logs.push(log);
|
||||
if (debug) {
|
||||
await saveBuildLog(log);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
subprocess.on('exit', async (code: number) => {
|
||||
if (code === 0) {
|
||||
//@ts-ignore
|
||||
resolve(code);
|
||||
} else {
|
||||
if (!debug) {
|
||||
for (const log of logs) {
|
||||
await saveBuildLog(log);
|
||||
}
|
||||
}
|
||||
reject(code);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
} else {
|
||||
if (shell) {
|
||||
return await execaCommand(command, {
|
||||
//@ts-ignore
|
||||
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||
});
|
||||
} else {
|
||||
//@ts-ignore
|
||||
return await execa(dockerCommand, dockerArgs, {
|
||||
env: { DOCKER_BUILDKIT: '1', DOCKER_HOST: engine }
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (shell) {
|
||||
return execaCommand(command, { shell: true });
|
||||
}
|
||||
//@ts-ignore
|
||||
return await execa(dockerCommand, dockerArgs);
|
||||
}
|
||||
}
|
||||
|
||||
export async function createRemoteEngineConfiguration(id: string) {
|
||||
const homedir = os.homedir();
|
||||
const sshKeyFile = `/tmp/id_rsa-${id}`;
|
||||
const localPort = await getFreeSSHLocalPort(id);
|
||||
const {
|
||||
sshKey: { privateKey },
|
||||
network,
|
||||
remoteIpAddress,
|
||||
remotePort,
|
||||
remoteUser
|
||||
} = await prisma.destinationDocker.findFirst({ where: { id }, include: { sshKey: true } });
|
||||
await fs.writeFile(sshKeyFile, decrypt(privateKey) + '\n', { encoding: 'utf8', mode: 400 });
|
||||
const config = sshConfig.parse('');
|
||||
const Host = `${remoteIpAddress}-remote`;
|
||||
|
||||
try {
|
||||
await executeCommand({ command: `ssh-keygen -R ${Host}` });
|
||||
await executeCommand({ command: `ssh-keygen -R ${remoteIpAddress}` });
|
||||
await executeCommand({ command: `ssh-keygen -R localhost:${localPort}` });
|
||||
} catch (error) {}
|
||||
|
||||
const found = config.find({ Host });
|
||||
const foundIp = config.find({ Host: remoteIpAddress });
|
||||
|
||||
if (found) config.remove({ Host });
|
||||
if (foundIp) config.remove({ Host: remoteIpAddress });
|
||||
|
||||
config.append({
|
||||
Host,
|
||||
Hostname: remoteIpAddress,
|
||||
Port: remotePort.toString(),
|
||||
User: remoteUser,
|
||||
StrictHostKeyChecking: 'no',
|
||||
IdentityFile: sshKeyFile,
|
||||
ControlMaster: 'auto',
|
||||
ControlPath: `${homedir}/.ssh/coolify-${remoteIpAddress}-%r@%h:%p`,
|
||||
ControlPersist: '10m'
|
||||
});
|
||||
|
||||
try {
|
||||
await fs.stat(`${homedir}/.ssh/`);
|
||||
} catch (error) {
|
||||
await fs.mkdir(`${homedir}/.ssh/`);
|
||||
}
|
||||
return await fs.writeFile(`${homedir}/.ssh/config`, sshConfig.stringify(config));
|
||||
}
|
||||
96
apps/trpc-experimental/server/src/lib/importers/github.ts
Normal file
96
apps/trpc-experimental/server/src/lib/importers/github.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
|
||||
import jsonwebtoken from 'jsonwebtoken';
|
||||
import { prisma } from '../../prisma';
|
||||
import { saveBuildLog } from '../buildPacks/common';
|
||||
import { decrypt } from '../common';
|
||||
import { executeCommand } from '../executeCommand';
|
||||
|
||||
export default async function ({
|
||||
applicationId,
|
||||
workdir,
|
||||
githubAppId,
|
||||
repository,
|
||||
apiUrl,
|
||||
gitCommitHash,
|
||||
htmlUrl,
|
||||
branch,
|
||||
buildId,
|
||||
customPort,
|
||||
forPublic
|
||||
}: {
|
||||
applicationId: string;
|
||||
workdir: string;
|
||||
githubAppId: string;
|
||||
repository: string;
|
||||
apiUrl: string;
|
||||
gitCommitHash?: string;
|
||||
htmlUrl: string;
|
||||
branch: string;
|
||||
buildId: string;
|
||||
customPort: number;
|
||||
forPublic?: boolean;
|
||||
}): Promise<string> {
|
||||
const { default: got } = await import('got')
|
||||
const url = htmlUrl.replace('https://', '').replace('http://', '');
|
||||
if (forPublic) {
|
||||
await saveBuildLog({
|
||||
line: `Cloning ${repository}:${branch}...`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
if (gitCommitHash) {
|
||||
await saveBuildLog({
|
||||
line: `Checking out ${gitCommitHash} commit...`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
}
|
||||
await executeCommand({
|
||||
command:
|
||||
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
|
||||
shell: true
|
||||
});
|
||||
|
||||
} else {
|
||||
const body = await prisma.githubApp.findUnique({ where: { id: githubAppId } });
|
||||
if (body.privateKey) body.privateKey = decrypt(body.privateKey);
|
||||
const { privateKey, appId, installationId } = body
|
||||
const githubPrivateKey = privateKey.replace(/\\n/g, '\n').replace(/"/g, '');
|
||||
|
||||
const payload = {
|
||||
iat: Math.round(new Date().getTime() / 1000),
|
||||
exp: Math.round(new Date().getTime() / 1000 + 60),
|
||||
iss: appId
|
||||
};
|
||||
const jwtToken = jsonwebtoken.sign(payload, githubPrivateKey, {
|
||||
algorithm: 'RS256'
|
||||
});
|
||||
const { token } = await got
|
||||
.post(`${apiUrl}/app/installations/${installationId}/access_tokens`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${jwtToken}`,
|
||||
Accept: 'application/vnd.github.machine-man-preview+json'
|
||||
}
|
||||
})
|
||||
.json();
|
||||
await saveBuildLog({
|
||||
line: `Cloning ${repository}:${branch}...`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
if (gitCommitHash) {
|
||||
await saveBuildLog({
|
||||
line: `Checking out ${gitCommitHash} commit...`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
}
|
||||
await executeCommand({
|
||||
command:
|
||||
`git clone -q -b ${branch} https://x-access-token:${token}@${url}/${repository}.git --config core.sshCommand="ssh -p ${customPort}" ${workdir}/ && cd ${workdir} && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `,
|
||||
shell: true
|
||||
});
|
||||
}
|
||||
const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
|
||||
return commit.replace('\n', '');
|
||||
}
|
||||
65
apps/trpc-experimental/server/src/lib/importers/gitlab.ts
Normal file
65
apps/trpc-experimental/server/src/lib/importers/gitlab.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { saveBuildLog } from "../buildPacks/common";
|
||||
import { executeCommand } from "../executeCommand";
|
||||
|
||||
export default async function ({
|
||||
applicationId,
|
||||
workdir,
|
||||
repodir,
|
||||
htmlUrl,
|
||||
gitCommitHash,
|
||||
repository,
|
||||
branch,
|
||||
buildId,
|
||||
privateSshKey,
|
||||
customPort,
|
||||
forPublic,
|
||||
customUser,
|
||||
}: {
|
||||
applicationId: string;
|
||||
workdir: string;
|
||||
repository: string;
|
||||
htmlUrl: string;
|
||||
branch: string;
|
||||
buildId: string;
|
||||
repodir: string;
|
||||
gitCommitHash: string;
|
||||
privateSshKey: string;
|
||||
customPort: number;
|
||||
forPublic: boolean;
|
||||
customUser: string;
|
||||
}): Promise<string> {
|
||||
const url = htmlUrl.replace('https://', '').replace('http://', '').replace(/\/$/, '');
|
||||
if (!forPublic) {
|
||||
await executeCommand({ command: `echo '${privateSshKey}' > ${repodir}/id.rsa`, shell: true });
|
||||
await executeCommand({ command: `chmod 600 ${repodir}/id.rsa` });
|
||||
}
|
||||
|
||||
await saveBuildLog({
|
||||
line: `Cloning ${repository}:${branch}...`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
if (gitCommitHash) {
|
||||
await saveBuildLog({
|
||||
line: `Checking out ${gitCommitHash} commit...`,
|
||||
buildId,
|
||||
applicationId
|
||||
});
|
||||
}
|
||||
if (forPublic) {
|
||||
await executeCommand({
|
||||
command:
|
||||
`git clone -q -b ${branch} https://${url}/${repository}.git ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
|
||||
}
|
||||
);
|
||||
} else {
|
||||
await executeCommand({
|
||||
command:
|
||||
`git clone -q -b ${branch} ${customUser}@${url}:${repository}.git --config core.sshCommand="ssh -p ${customPort} -q -i ${repodir}id.rsa -o StrictHostKeyChecking=no" ${workdir}/ && cd ${workdir}/ && git checkout ${gitCommitHash || ""} && git submodule update --init --recursive && git lfs pull && cd .. `, shell: true
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
const { stdout: commit } = await executeCommand({ command: `cd ${workdir}/ && git rev-parse HEAD`, shell: true });
|
||||
return commit.replace('\n', '');
|
||||
}
|
||||
4
apps/trpc-experimental/server/src/lib/importers/index.ts
Normal file
4
apps/trpc-experimental/server/src/lib/importers/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
import github from './github';
|
||||
import gitlab from './gitlab';
|
||||
|
||||
export { github, gitlab };
|
||||
48
apps/trpc-experimental/server/src/lib/logging.ts
Normal file
48
apps/trpc-experimental/server/src/lib/logging.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { prisma } from '../prisma';
|
||||
import { encrypt, generateTimestamp, isDev } from './common';
|
||||
import { day } from './dayjs';
|
||||
|
||||
export type Line = string | { shortMessage: string; stderr: string };
|
||||
export type BuildLog = {
|
||||
line: Line;
|
||||
buildId?: string;
|
||||
applicationId?: string;
|
||||
};
|
||||
export const saveBuildLog = async ({ line, buildId, applicationId }: BuildLog): Promise<any> => {
|
||||
if (buildId === 'undefined' || buildId === 'null' || !buildId) return;
|
||||
if (applicationId === 'undefined' || applicationId === 'null' || !applicationId) return;
|
||||
const { default: got } = await import('got');
|
||||
if (typeof line === 'object' && line) {
|
||||
if (line.shortMessage) {
|
||||
line = line.shortMessage + '\n' + line.stderr;
|
||||
} else {
|
||||
line = JSON.stringify(line);
|
||||
}
|
||||
}
|
||||
if (line && typeof line === 'string' && line.includes('ghs_')) {
|
||||
const regex = /ghs_.*@/g;
|
||||
line = line.replace(regex, '<SENSITIVE_DATA_DELETED>@');
|
||||
}
|
||||
const addTimestamp = `[${generateTimestamp()}] ${line}`;
|
||||
const fluentBitUrl = isDev ? 'http://localhost:24224' : 'http://coolify-fluentbit:24224';
|
||||
|
||||
if (isDev) {
|
||||
console.debug(`[${applicationId}] ${addTimestamp}`);
|
||||
}
|
||||
try {
|
||||
return await got.post(`${fluentBitUrl}/${applicationId}_buildlog_${buildId}.csv`, {
|
||||
json: {
|
||||
line: encrypt(line)
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
return await prisma.buildLog.create({
|
||||
data: {
|
||||
line: addTimestamp,
|
||||
buildId,
|
||||
time: Number(day().valueOf()),
|
||||
applicationId
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
47
apps/trpc-experimental/server/src/lib/ssh.ts
Normal file
47
apps/trpc-experimental/server/src/lib/ssh.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { prisma } from '../prisma';
|
||||
import { generateRangeArray } from './common';
|
||||
|
||||
export async function getFreeSSHLocalPort(id: string): Promise<number | boolean> {
|
||||
const { default: isReachable } = await import('is-port-reachable');
|
||||
const { remoteIpAddress, sshLocalPort } = await prisma.destinationDocker.findUnique({
|
||||
where: { id }
|
||||
});
|
||||
if (sshLocalPort) {
|
||||
return Number(sshLocalPort);
|
||||
}
|
||||
|
||||
const data = await prisma.setting.findFirst();
|
||||
const { minPort, maxPort } = data;
|
||||
|
||||
const ports = await prisma.destinationDocker.findMany({
|
||||
where: { sshLocalPort: { not: null }, remoteIpAddress: { not: remoteIpAddress } }
|
||||
});
|
||||
|
||||
const alreadyConfigured = await prisma.destinationDocker.findFirst({
|
||||
where: {
|
||||
remoteIpAddress,
|
||||
id: { not: id },
|
||||
sshLocalPort: { not: null }
|
||||
}
|
||||
});
|
||||
if (alreadyConfigured?.sshLocalPort) {
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { sshLocalPort: alreadyConfigured.sshLocalPort }
|
||||
});
|
||||
return Number(alreadyConfigured.sshLocalPort);
|
||||
}
|
||||
const range = generateRangeArray(minPort, maxPort);
|
||||
const availablePorts = range.filter((port) => !ports.map((p) => p.sshLocalPort).includes(port));
|
||||
for (const port of availablePorts) {
|
||||
const found = await isReachable(port, { host: 'localhost' });
|
||||
if (!found) {
|
||||
await prisma.destinationDocker.update({
|
||||
where: { id },
|
||||
data: { sshLocalPort: Number(port) }
|
||||
});
|
||||
return Number(port);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
Reference in New Issue
Block a user