Compare commits

10 Commits
v3.12.35 ... v3

Author         SHA1        Message                                     Date
Andras Bacsai  b80a519b80  updates                                     2023-09-05 11:31:55 +02:00
Andras Bacsai  2fa7ffc931  updates                                     2023-09-05 11:25:16 +02:00
Andras Bacsai  4abec14a21  updates                                     2023-09-05 11:24:42 +02:00
Andras Bacsai  18d0623011  force volume prune                          2023-08-16 15:27:10 +02:00
Andras Bacsai  aa634c78d1  fix seed js                                 2023-08-16 15:26:36 +02:00
Andras Bacsai  a2d4373104  cleanup volumes as well                     2023-08-16 15:26:33 +02:00
Andras Bacsai  702e16d643  rename rollback to upgrade                  2023-08-14 09:23:33 +02:00
Andras Bacsai  3b25c8f96b  fix: docker compose env file                2023-08-12 00:10:14 +02:00
Andras Bacsai  1c8c567791  fix: env variables in compose deplyoments   2023-07-27 12:40:58 +02:00
Andras Bacsai  807a3c9d66  fix: n8n double mount (version++)           2023-07-26 10:38:36 +02:00
9 changed files with 42 additions and 299 deletions

@@ -4,9 +4,8 @@ concurrency:
   cancel-in-progress: true
 on:
   push:
-    branches-ignore:
-      - "main"
-      - "v4"
+    branches:
+      - "v3"
 env:
   REGISTRY: ghcr.io
   IMAGE_NAME: "coollabsio/coolify"

@@ -2440,6 +2440,7 @@
image: n8nio/n8n:$$core_version
volumes:
- $$id-data:/root/.n8n
- $$id-data:/home/node/.n8n
- $$id-data-write:/files
- /var/run/docker.sock:/var/run/docker.sock
environment:
@@ -3371,141 +3372,6 @@
- POSTGRES_PASSWORD=$$secret_postgres_password
- POSTGRES_DB=$$config_postgres_db
ports: []
files:
- location: /docker-entrypoint-initdb.d/schema.postgresql.sql
content: |2-
-- CreateTable
CREATE TABLE "account" (
"user_id" SERIAL NOT NULL,
"username" VARCHAR(255) NOT NULL,
"password" VARCHAR(60) NOT NULL,
"is_admin" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("user_id")
);
-- CreateTable
CREATE TABLE "event" (
"event_id" SERIAL NOT NULL,
"website_id" INTEGER NOT NULL,
"session_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"url" VARCHAR(500) NOT NULL,
"event_type" VARCHAR(50) NOT NULL,
"event_value" VARCHAR(50) NOT NULL,
PRIMARY KEY ("event_id")
);
-- CreateTable
CREATE TABLE "pageview" (
"view_id" SERIAL NOT NULL,
"website_id" INTEGER NOT NULL,
"session_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"url" VARCHAR(500) NOT NULL,
"referrer" VARCHAR(500),
PRIMARY KEY ("view_id")
);
-- CreateTable
CREATE TABLE "session" (
"session_id" SERIAL NOT NULL,
"session_uuid" UUID NOT NULL,
"website_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"hostname" VARCHAR(100),
"browser" VARCHAR(20),
"os" VARCHAR(20),
"device" VARCHAR(20),
"screen" VARCHAR(11),
"language" VARCHAR(35),
"country" CHAR(2),
PRIMARY KEY ("session_id")
);
-- CreateTable
CREATE TABLE "website" (
"website_id" SERIAL NOT NULL,
"website_uuid" UUID NOT NULL,
"user_id" INTEGER NOT NULL,
"name" VARCHAR(100) NOT NULL,
"domain" VARCHAR(500),
"share_id" VARCHAR(64),
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("website_id")
);
-- CreateIndex
CREATE UNIQUE INDEX "account.username_unique" ON "account"("username");
-- CreateIndex
CREATE INDEX "event_created_at_idx" ON "event"("created_at");
-- CreateIndex
CREATE INDEX "event_session_id_idx" ON "event"("session_id");
-- CreateIndex
CREATE INDEX "event_website_id_idx" ON "event"("website_id");
-- CreateIndex
CREATE INDEX "pageview_created_at_idx" ON "pageview"("created_at");
-- CreateIndex
CREATE INDEX "pageview_session_id_idx" ON "pageview"("session_id");
-- CreateIndex
CREATE INDEX "pageview_website_id_created_at_idx" ON "pageview"("website_id", "created_at");
-- CreateIndex
CREATE INDEX "pageview_website_id_idx" ON "pageview"("website_id");
-- CreateIndex
CREATE INDEX "pageview_website_id_session_id_created_at_idx" ON "pageview"("website_id", "session_id", "created_at");
-- CreateIndex
CREATE UNIQUE INDEX "session.session_uuid_unique" ON "session"("session_uuid");
-- CreateIndex
CREATE INDEX "session_created_at_idx" ON "session"("created_at");
-- CreateIndex
CREATE INDEX "session_website_id_idx" ON "session"("website_id");
-- CreateIndex
CREATE UNIQUE INDEX "website.website_uuid_unique" ON "website"("website_uuid");
-- CreateIndex
CREATE UNIQUE INDEX "website.share_id_unique" ON "website"("share_id");
-- CreateIndex
CREATE INDEX "website_user_id_idx" ON "website"("user_id");
-- AddForeignKey
ALTER TABLE "event" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "event" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pageview" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pageview" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "session" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "website" ADD FOREIGN KEY ("user_id") REFERENCES "account"("user_id") ON DELETE CASCADE ON UPDATE CASCADE;
insert into account (username, password, is_admin) values ('admin', '$$hashed$$secret_admin_password', true);
variables:
- id: $$secret_database_url
name: DATABASE_URL
@@ -3577,141 +3443,6 @@
- POSTGRES_PASSWORD=$$secret_postgres_password
- POSTGRES_DB=$$config_postgres_db
ports: []
files:
- location: /docker-entrypoint-initdb.d/schema.postgresql.sql
content: |2-
-- CreateTable
CREATE TABLE "account" (
"user_id" SERIAL NOT NULL,
"username" VARCHAR(255) NOT NULL,
"password" VARCHAR(60) NOT NULL,
"is_admin" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("user_id")
);
-- CreateTable
CREATE TABLE "event" (
"event_id" SERIAL NOT NULL,
"website_id" INTEGER NOT NULL,
"session_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"url" VARCHAR(500) NOT NULL,
"event_type" VARCHAR(50) NOT NULL,
"event_value" VARCHAR(50) NOT NULL,
PRIMARY KEY ("event_id")
);
-- CreateTable
CREATE TABLE "pageview" (
"view_id" SERIAL NOT NULL,
"website_id" INTEGER NOT NULL,
"session_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"url" VARCHAR(500) NOT NULL,
"referrer" VARCHAR(500),
PRIMARY KEY ("view_id")
);
-- CreateTable
CREATE TABLE "session" (
"session_id" SERIAL NOT NULL,
"session_uuid" UUID NOT NULL,
"website_id" INTEGER NOT NULL,
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
"hostname" VARCHAR(100),
"browser" VARCHAR(20),
"os" VARCHAR(20),
"device" VARCHAR(20),
"screen" VARCHAR(11),
"language" VARCHAR(35),
"country" CHAR(2),
PRIMARY KEY ("session_id")
);
-- CreateTable
CREATE TABLE "website" (
"website_id" SERIAL NOT NULL,
"website_uuid" UUID NOT NULL,
"user_id" INTEGER NOT NULL,
"name" VARCHAR(100) NOT NULL,
"domain" VARCHAR(500),
"share_id" VARCHAR(64),
"created_at" TIMESTAMPTZ(6) DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("website_id")
);
-- CreateIndex
CREATE UNIQUE INDEX "account.username_unique" ON "account"("username");
-- CreateIndex
CREATE INDEX "event_created_at_idx" ON "event"("created_at");
-- CreateIndex
CREATE INDEX "event_session_id_idx" ON "event"("session_id");
-- CreateIndex
CREATE INDEX "event_website_id_idx" ON "event"("website_id");
-- CreateIndex
CREATE INDEX "pageview_created_at_idx" ON "pageview"("created_at");
-- CreateIndex
CREATE INDEX "pageview_session_id_idx" ON "pageview"("session_id");
-- CreateIndex
CREATE INDEX "pageview_website_id_created_at_idx" ON "pageview"("website_id", "created_at");
-- CreateIndex
CREATE INDEX "pageview_website_id_idx" ON "pageview"("website_id");
-- CreateIndex
CREATE INDEX "pageview_website_id_session_id_created_at_idx" ON "pageview"("website_id", "session_id", "created_at");
-- CreateIndex
CREATE UNIQUE INDEX "session.session_uuid_unique" ON "session"("session_uuid");
-- CreateIndex
CREATE INDEX "session_created_at_idx" ON "session"("created_at");
-- CreateIndex
CREATE INDEX "session_website_id_idx" ON "session"("website_id");
-- CreateIndex
CREATE UNIQUE INDEX "website.website_uuid_unique" ON "website"("website_uuid");
-- CreateIndex
CREATE UNIQUE INDEX "website.share_id_unique" ON "website"("share_id");
-- CreateIndex
CREATE INDEX "website_user_id_idx" ON "website"("user_id");
-- AddForeignKey
ALTER TABLE "event" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "event" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pageview" ADD FOREIGN KEY ("session_id") REFERENCES "session"("session_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "pageview" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "session" ADD FOREIGN KEY ("website_id") REFERENCES "website"("website_id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "website" ADD FOREIGN KEY ("user_id") REFERENCES "account"("user_id") ON DELETE CASCADE ON UPDATE CASCADE;
insert into account (username, password, is_admin) values ('admin', '$$hashed$$secret_admin_password', true);
variables:
- id: $$secret_database_url
name: DATABASE_URL

@@ -19,7 +19,9 @@ export default async function (data) {
 		dockerComposeConfiguration,
 		dockerComposeFileLocation
 	} = data;
-	const fileYaml = `${workdir}${baseDirectory}${dockerComposeFileLocation}`;
+	const baseDir = `${workdir}${baseDirectory}`;
+	const envFile = `${baseDir}/.env`;
+	const fileYaml = `${baseDir}${dockerComposeFileLocation}`;
 	const dockerComposeRaw = await fs.readFile(fileYaml, 'utf8');
 	const dockerComposeYaml = yaml.load(dockerComposeRaw);
 	if (!dockerComposeYaml.services) {
@@ -31,7 +33,7 @@ export default async function (data) {
 		envs = [...envs, ...generateSecrets(secrets, pullmergeRequestId, false, null)];
 		buildEnvs = [...buildEnvs, ...generateSecrets(secrets, pullmergeRequestId, true, null, true)];
 	}
+	await fs.writeFile(envFile, envs.join('\n'));
 	const composeVolumes = [];
 	if (volumes.length > 0) {
 		for (const volume of volumes) {
@@ -50,31 +52,38 @@
 		if (value['env_file']) {
 			delete value['env_file'];
 		}
+		value['env_file'] = [envFile];
-		let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
-		if (Object.keys(environment).length > 0) {
-			environment = Object.entries(environment).map(([key, value]) => `${key}=${value}`);
-		}
-		value['environment'] = [...environment, ...envs];
+		// let environment = typeof value['environment'] === 'undefined' ? [] : value['environment'];
+		// let finalEnvs = [...envs];
+		// if (Object.keys(environment).length > 0) {
+		// for (const arg of Object.keys(environment)) {
+		// const [key, _] = arg.split('=');
+		// if (finalEnvs.filter((env) => env.startsWith(key)).length === 0) {
+		// finalEnvs.push(arg);
+		// }
+		// }
+		// }
+		// value['environment'] = [...finalEnvs];
 		let build = typeof value['build'] === 'undefined' ? [] : value['build'];
 		if (typeof build === 'string') {
 			build = { context: build };
 		}
 		const buildArgs = typeof build['args'] === 'undefined' ? [] : build['args'];
-		let finalArgs = [...buildEnvs];
+		let finalBuildArgs = [...buildEnvs];
 		if (Object.keys(buildArgs).length > 0) {
 			for (const arg of Object.keys(buildArgs)) {
 				const [key, _] = arg.split('=');
-				if (finalArgs.filter((env) => env.startsWith(key)).length === 0) {
-					finalArgs.push(arg);
+				if (finalBuildArgs.filter((env) => env.startsWith(key)).length === 0) {
+					finalBuildArgs.push(arg);
 				}
 			}
 		}
 		if (build.length > 0 || buildArgs.length > 0) {
 			value['build'] = {
 				...build,
-				args: finalArgs
+				args: finalBuildArgs
 			};
 		}
@@ -121,7 +130,6 @@ export default async function (data) {
 					.replace(/^\./, `~`)
 					.replace(/^\.\./, '~')
 					.replace(/^\$PWD/, '~');
-				console.log({ source });
 			} else {
 				if (!target) {
 					target = source;
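
The hunks above switch compose deployments from merging secrets into each service's `environment` array to writing them once into a `.env` file that every service references through `env_file` (the old merging logic is kept, commented out). A minimal sketch of the idea, with an illustrative helper name and a simplified flow rather than the actual handler:

```ts
import { promises as fs } from 'fs';
import yaml from 'js-yaml';

// Write the collected "KEY=value" pairs to a shared .env file and point each
// service at it via `env_file`, instead of inlining them into `environment`.
async function attachEnvFile(baseDir: string, composePath: string, envs: string[]) {
	const envFile = `${baseDir}/.env`;
	await fs.writeFile(envFile, envs.join('\n'));

	const compose: any = yaml.load(await fs.readFile(composePath, 'utf8'));
	for (const service of Object.values<any>(compose.services ?? {})) {
		// Compose gives explicit `environment:` entries precedence over
		// `env_file`, so values defined in the compose file keep winning.
		service.env_file = [envFile];
	}
	await fs.writeFile(composePath, yaml.dump(compose));
}
```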

@@ -18,7 +18,7 @@ import { scheduler } from './scheduler';
 import type { ExecaChildProcess } from 'execa';
 import { FastifyReply } from 'fastify';
-export const version = '3.12.35';
+export const version = '3.12.39';
 export const isDev = process.env.NODE_ENV === 'development';
 export const proxyPort = process.env.COOLIFY_PROXY_PORT;
 export const proxySecurePort = process.env.COOLIFY_PROXY_SECURE_PORT;
@@ -1762,7 +1762,7 @@ export function convertTolOldVolumeNames(type) {
 	}
 }
-export async function cleanupDockerStorage(dockerId) {
+export async function cleanupDockerStorage(dockerId, volumes = false) {
 	// Cleanup images that are not used by any container
 	try {
 		await executeCommand({ dockerId, command: `docker image prune -af` });
@@ -1780,6 +1780,11 @@ export async function cleanupDockerStorage(dockerId) {
 	try {
 		await executeCommand({ dockerId, command: `docker builder prune -af` });
 	} catch (error) { }
+	if (volumes) {
+		try {
+			await executeCommand({ dockerId, command: `docker volume prune -af` });
+		} catch (error) { }
+	}
 }
 export function persistentVolumes(id, persistentStorage, config) {

@@ -57,7 +57,7 @@ export async function cleanupManually(request: FastifyRequest) {
 		const destination = await prisma.destinationDocker.findUnique({
 			where: { id: serverId }
 		});
-		await cleanupDockerStorage(destination.id);
+		await cleanupDockerStorage(destination.id, true);
 		return {};
 	} catch ({ status, message }) {
 		return errorHandler({ status, message });
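
Together with the `force volume prune` commit above, these two hunks make volume pruning opt-in: `cleanupDockerStorage` only runs `docker volume prune -af` when the new `volumes` flag is set, and of the call sites shown here only the manual cleanup endpoint passes `true`. An illustrative sketch of the resulting call sites (the `declare` and function names are just to keep the snippet self-contained):

```ts
// Signature as changed in the diff above.
declare function cleanupDockerStorage(dockerId: string, volumes?: boolean): Promise<void>;

async function scheduledCleanup(dockerId: string) {
	// Default behaviour: prune unused images and builder cache only.
	await cleanupDockerStorage(dockerId);
}

async function manualCleanup(dockerId: string) {
	// Opt in to `docker volume prune -af` as well; destructive for unused volumes.
	await cleanupDockerStorage(dockerId, true);
}
```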

@@ -477,7 +477,7 @@ export async function saveServiceType(
 			const [volumeName, path] = volume.split(':');
 			if (!volumeName.startsWith('/')) {
 				const found = await prisma.servicePersistentStorage.findFirst({
-					where: { volumeName, serviceId: id }
+					where: { volumeName, serviceId: id, path }
 				});
 				if (!found) {
 					await prisma.servicePersistentStorage.create({
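
The lookup guarding the `create` now matches on the mount `path` as well, so a template that mounts the same named volume at two different paths (as the n8n template above does with `$$id-data`) gets one persistent-storage record per mount instead of silently skipping the second one. A sketch of the adjusted logic, assuming a create payload mirroring the fields visible in the diff:

```ts
// Illustrative helper; `prisma` is typed loosely to avoid guessing the client types.
async function ensurePersistentStorage(prisma: any, serviceId: string, volume: string) {
	const [volumeName, path] = volume.split(':');
	if (volumeName.startsWith('/')) return; // only named volumes are registered here

	// Before this change the lookup matched on volumeName + serviceId only, so a
	// second mount of the same volume at a different path was never recorded.
	const found = await prisma.servicePersistentStorage.findFirst({
		where: { volumeName, serviceId, path }
	});
	if (!found) {
		await prisma.servicePersistentStorage.create({
			data: { volumeName, path, serviceId } // assumed payload
		});
	}
}
```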

File diff suppressed because one or more lines are too long

@@ -56,23 +56,23 @@
 	async function rollback() {
 		if (rollbackVersion) {
-			const sure = confirm(`Are you sure you want rollback Coolify to ${rollbackVersion}?`);
+			const sure = confirm(`Are you sure you want upgrade Coolify to ${rollbackVersion}?`);
 			if (sure) {
 				try {
 					loading.rollback = true;
 					console.log('loading.rollback', loading.rollback);
 					if (dev) {
-						console.log('rolling back to', rollbackVersion);
+						console.log('Upgrading to ', rollbackVersion);
 						await asyncSleep(4000);
 						return window.location.reload();
 					} else {
 						addToast({
-							message: 'Rollback started...',
+							message: 'Upgrade started...',
 							type: 'success'
 						});
 						await post(`/update`, { type: 'update', latestVersion: rollbackVersion });
 						addToast({
-							message: 'Rollback completed.<br><br>Waiting for the new version to start...',
+							message: 'Upgrade completed.<br><br>Waiting for the new version to start...',
 							type: 'success'
 						});
@@ -381,12 +381,12 @@
 					/>
 				</div>
-				<div class="grid grid-cols-4 items-center">
+				<div class="grid grid-cols-4 items-center pb-12">
 					<div class="col-span-2">
-						Rollback Coolify to a specific version
+						Upgrade Coolify to a specific version
 						<Explainer
 							position="dropdown-bottom"
-							explanation="You can rollback to a specific version of Coolify. This will not affect your current running resources.<br><br><a href='https://github.com/coollabsio/coolify/releases' target='_blank'>See available versions</a>"
+							explanation="You can upgrade to a specific version of Coolify. This will not affect your current running resources, but could cause issues if you downgrade to an older version where the database layout was different..<br><br><a href='https://github.com/coollabsio/coolify/releases' target='_blank'>See available versions</a>"
 						/>
 					</div>
 					<input
@@ -401,7 +401,7 @@
 					class:loading={loading.rollback}
 					class="btn btn-primary ml-2"
 					disabled={!rollbackVersion || loading.rollback}
-					on:click|preventDefault|stopPropagation={rollback}>Rollback</button
+					on:click|preventDefault|stopPropagation={rollback}>Upgrade</button
 				>
 			</div>
 			<div class="grid grid-cols-2 items-center">

@@ -1,7 +1,7 @@
 {
 	"name": "coolify",
 	"description": "An open-source & self-hostable Heroku / Netlify alternative.",
-	"version": "3.12.35",
+	"version": "3.12.39",
 	"license": "Apache-2.0",
 	"repository": "github:coollabsio/coolify",
 	"scripts": {