v1.0.12 - Sveltekit migration (#44)

Changed the whole tech stack to SvelteKit (a sketch of the new endpoint shape follows the lists below), which means:
- TypeScript
- SSR
- No Fastify :(
- Beta, but it's fine!

Other changes:
- Tailwind -> Tailwind JIT
- A lot more
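
All of the endpoints in this diff follow SvelteKit's (then-beta) endpoint convention instead of Fastify route handlers: one module per route that exports get/post/del functions, each taking a Request and returning a plain { status, body } object. Below is a minimal sketch of that shape for orientation only; the route path, the count field, and the comments are made up for illustration and are not part of this commit.

import type { Request } from '@sveltejs/kit';
import { docker } from '$lib/api/docker';

// Hypothetical GET endpoint (e.g. src/routes/api/v1/example.ts) — illustrative only.
export async function get(request: Request) {
	try {
		// Success path: return a 200 with a plain body object.
		const services = await docker.engine.listServices();
		return {
			status: 200,
			body: { success: true, count: services.length }
		};
	} catch (error) {
		// Error path used throughout this commit: a 500 with the error in the body.
		return { status: 500, body: { error } };
	}
}
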
Author: Andras Bacsai
Date: 2021-05-14 21:51:14 +02:00
Committed by: GitHub
Parent: cccb9a5fec
Commit: 23a4ebb74a
229 changed files with 7781 additions and 11333 deletions


@@ -0,0 +1,51 @@
import { setDefaultConfiguration } from '$lib/api/applications/configuration';
import { saveServerLog } from '$lib/api/applications/logging';
import { docker } from '$lib/api/docker';
import type { Request } from '@sveltejs/kit';
export async function post(request: Request) {
try {
const { DOMAIN } = process.env;
const configuration = setDefaultConfiguration(request.body);
const services = (await docker.engine.listServices()).filter(
(r) => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application'
);
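// Scan every managed application to see whether another app already publishes this domain + path combination.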
let foundDomain = false;
for (const service of services) {
const running = JSON.parse(service.Spec.Labels.configuration);
if (running) {
if (
running.publish.domain === configuration.publish.domain &&
running.repository.id !== configuration.repository.id &&
running.publish.path === configuration.publish.path
) {
foundDomain = true;
}
}
}
if (DOMAIN === configuration.publish.domain) foundDomain = true;
if (foundDomain) {
return {
status: 200,
body: {
success: false,
message: 'Domain already in use.'
}
};
}
return {
status: 200,
body: { success: true, message: 'OK' }
};
} catch (error) {
await saveServerLog(error);
return {
status: 500,
body: {
error
}
};
}
}


@@ -0,0 +1,50 @@
import { docker } from '$lib/api/docker';
import type { Request } from '@sveltejs/kit';
export async function post(request: Request) {
const { name, organization, branch }: any = request.body || {};
if (name && organization && branch) {
const services = await docker.engine.listServices();
const applications = services.filter(
(r) => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application'
);
const found = applications.find((r) => {
const configuration = r.Spec.Labels.configuration
? JSON.parse(r.Spec.Labels.configuration)
: null;
// The outer guard already ensures branch is set, so one comparison covers both cases;
// also skip services without a parsed configuration label.
if (!configuration) return false;
return (
configuration.repository.name === name &&
configuration.repository.organization === organization &&
configuration.repository.branch === branch
);
});
if (found) {
return {
status: 200,
body: {
success: true,
...JSON.parse(found.Spec.Labels.configuration)
}
};
} else {
return {
status: 500,
body: {
error: 'No configuration found.'
}
};
}
}
// Without all three parameters there is nothing to look up; fail loudly instead of returning nothing.
return {
status: 400,
body: {
error: 'Missing name, organization or branch.'
}
};
}


@@ -0,0 +1,90 @@
import type { Request } from '@sveltejs/kit';
import Deployment from '$models/Logs/Deployment';
import { docker } from '$lib/api/docker';
import { precheckDeployment, setDefaultConfiguration } from '$lib/api/applications/configuration';
import cloneRepository from '$lib/api/applications/cloneRepository';
import { cleanupTmp } from '$lib/api/common';
import queueAndBuild from '$lib/api/applications/queueAndBuild';
export async function post(request: Request) {
let configuration;
try {
const services = (await docker.engine.listServices()).filter(
(r) => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application'
);
configuration = setDefaultConfiguration(request.body);
if (!configuration) {
return {
status: 500,
body: {
error: 'Whaaat?'
}
};
}
await cloneRepository(configuration);
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({
services,
configuration
});
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
cleanupTmp(configuration.general.workdir);
return {
status: 200,
body: {
success: false,
message: 'Nothing changed, no need to redeploy.'
}
};
}
const alreadyQueued = await Deployment.find({
repoId: configuration.repository.id,
branch: configuration.repository.branch,
organization: configuration.repository.organization,
name: configuration.repository.name,
domain: configuration.publish.domain,
progress: { $in: ['queued', 'inprogress'] }
});
if (alreadyQueued.length > 0) {
return {
status: 200,
body: {
success: false,
message: 'Already in the queue.'
}
};
}
queueAndBuild(configuration, imageChanged);
return {
status: 200,
body: {
message: 'Deployment queued.',
nickname: configuration.general.nickname,
name: configuration.build.container.name,
deployId: configuration.general.deployId
}
};
} catch (error) {
// configuration may still be undefined if parsing or cloning failed before it was set.
if (configuration) {
await Deployment.findOneAndUpdate(
{
repoId: configuration.repository.id,
branch: configuration.repository.branch,
organization: configuration.repository.organization,
name: configuration.repository.name,
domain: configuration.publish.domain
},
{ progress: 'failed' }
);
}
return {
status: 500,
body: {
error
}
};
}
}


@@ -0,0 +1,35 @@
import type { Request } from '@sveltejs/kit';
import ApplicationLog from '$models/Logs/Application';
import Deployment from '$models/Logs/Deployment';
import dayjs from 'dayjs';
import relativeTime from 'dayjs/plugin/relativeTime.js';
dayjs.extend(relativeTime);
export async function get(request: Request) {
const { deployId } = request.params;
try {
const logs: any = await ApplicationLog.find({ deployId })
.select('-_id -__v')
.sort({ createdAt: 'asc' });
const deploy: any = await Deployment.findOne({ deployId })
.select('-_id -__v')
.sort({ createdAt: 'desc' });
const finalLogs: any = {};
finalLogs.progress = deploy.progress;
finalLogs.events = logs.map((log) => log.event);
// Comparing updatedAt with itself always yields "a few seconds ago"; report the time since the last update instead.
finalLogs.human = dayjs(deploy.updatedAt).fromNow();
return {
status: 200,
body: {
...finalLogs
}
};
} catch (e) {
return {
status: 500,
body: {
error: e
}
};
}
}


@@ -0,0 +1,47 @@
import type { Request } from '@sveltejs/kit';
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import relativeTime from 'dayjs/plugin/relativeTime.js';
import Deployment from '$models/Logs/Deployment';
dayjs.extend(utc);
dayjs.extend(relativeTime);
export async function get(request: Request) {
try {
const repoId = request.query.get('repoId');
const branch = request.query.get('branch');
const page = request.query.get('page');
const onePage = 5;
const show = Number(page) * onePage || 5;
const deploy: any = await Deployment.find({ repoId, branch })
.select('-_id -__v -repoId')
.sort({ createdAt: 'desc' })
.limit(show);
const finalLogs = deploy.map((d) => {
const log = { ...d._doc };
const updatedAt = dayjs(d.updatedAt).utc();
log.took = updatedAt.diff(dayjs(d.createdAt)) / 1000;
log.since = updatedAt.fromNow();
return log;
});
return {
status: 200,
body: {
success: true,
logs: finalLogs
}
};
} catch (error) {
console.log(error);
return {
status: 500,
body: {
error
}
};
}
}


@@ -0,0 +1,27 @@
import { saveServerLog } from '$lib/api/applications/logging';
import { docker } from '$lib/api/docker';
import type { Request } from '@sveltejs/kit';
export async function get(request: Request) {
try {
const name = request.query.get('name');
const service = await docker.engine.getService(`${name}_${name}`);
const logs = (await service.logs({ stdout: true, stderr: true, timestamps: true }))
.toString()
.split('\n')
.map((l) => l.slice(8))
.filter((a) => a);
return {
status: 200,
body: { success: true, logs }
};
} catch (error) {
await saveServerLog(error);
return {
status: 500,
body: {
error
}
};
}
}


@@ -0,0 +1,60 @@
import { purgeImagesContainers } from '$lib/api/applications/cleanup';
import { docker } from '$lib/api/docker';
import Deployment from '$models/Logs/Deployment';
import ApplicationLog from '$models/Logs/Application';
import { delay, execShellAsync } from '$lib/api/common';
import type { Request } from '@sveltejs/kit';
async function call(found) {
await delay(10000);
await purgeImagesContainers(found, true);
}
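// call() purges leftover images and containers roughly 10 seconds after the stack has been removed; it is deliberately not awaited below.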
export async function post(request: Request) {
const { organization, name, branch } = request.body;
let found = false;
try {
(await docker.engine.listServices())
.filter((r) => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application')
.map((s) => {
const running = JSON.parse(s.Spec.Labels.configuration);
if (
running.repository.organization === organization &&
running.repository.name === name &&
running.repository.branch === branch
) {
found = running;
}
return null;
});
if (found) {
const deploys = await Deployment.find({ organization, branch, name });
for (const deploy of deploys) {
await ApplicationLog.deleteMany({ deployId: deploy.deployId });
await Deployment.deleteMany({ deployId: deploy.deployId });
}
await execShellAsync(`docker stack rm ${found.build.container.name}`);
call(found);
return {
status: 200,
body: {
organization,
name,
branch
}
};
} else {
return {
status: 500,
error: {
message: 'Nothing to do.'
}
};
}
} catch (error) {
return {
status: 500,
error: {
message: 'Nothing to do.'
}
};
}
}


@@ -0,0 +1,76 @@
import { docker } from '$lib/api/docker';
import LogsServer from '$models/Logs/Server';
import type { Request } from '@sveltejs/kit';
export async function get(request: Request) {
const serverLogs = await LogsServer.find();
const dockerServices = await docker.engine.listServices();
let applications: any = dockerServices.filter(
(r) =>
r.Spec.Labels.managedBy === 'coolify' &&
r.Spec.Labels.type === 'application' &&
r.Spec.Labels.configuration
);
let databases: any = dockerServices.filter(
(r) =>
r.Spec.Labels.managedBy === 'coolify' &&
r.Spec.Labels.type === 'database' &&
r.Spec.Labels.configuration
);
let services: any = dockerServices.filter(
(r) =>
r.Spec.Labels.managedBy === 'coolify' &&
r.Spec.Labels.type === 'service' &&
r.Spec.Labels.configuration
);
applications = applications.map((r) => {
if (JSON.parse(r.Spec.Labels.configuration)) {
return {
configuration: JSON.parse(r.Spec.Labels.configuration),
UpdatedAt: r.UpdatedAt
};
}
return {};
});
databases = databases.map((r) => {
if (JSON.parse(r.Spec.Labels.configuration)) {
return {
configuration: JSON.parse(r.Spec.Labels.configuration)
};
}
return {};
});
services = services.map((r) => {
if (JSON.parse(r.Spec.Labels.configuration)) {
return {
serviceName: r.Spec.Labels.serviceName,
configuration: JSON.parse(r.Spec.Labels.configuration)
};
}
return {};
});
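// Deduplicate applications by domain + path so each deployment shows up only once on the dashboard.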
applications = [
...new Map(
applications.map((item) => [
item.configuration.publish.domain + item.configuration.publish.path,
item
])
).values()
];
return {
status: 200,
body: {
success: true,
serverLogs,
applications: {
deployed: applications
},
databases: {
deployed: databases
},
services: {
deployed: services
}
}
};
}


@@ -0,0 +1,122 @@
import type { Request } from '@sveltejs/kit';
import { saveServerLog } from '$lib/api/applications/logging';
import { execShellAsync } from '$lib/api/common';
import { docker } from '$lib/api/docker';
import fs from 'fs';
export async function post(request: Request) {
const tmpdir = '/tmp/backups';
const { deployId } = request.params;
try {
const now = new Date();
const configuration = JSON.parse(
JSON.parse(await execShellAsync(`docker inspect ${deployId}_${deployId}`))[0].Spec.Labels
.configuration
);
const type = configuration.general.type;
const serviceId = configuration.general.deployId;
const databaseService = (await docker.engine.listContainers()).find(
(r) => r.Labels['com.docker.stack.namespace'] === serviceId && r.State === 'running'
);
// Guard before reading labels: without a running container there is nothing to back up.
if (!databaseService) {
return { status: 500, body: { error: 'No running database container found.' } };
}
const containerID = databaseService.Labels['com.docker.swarm.task.name'];
await execShellAsync(`mkdir -p ${tmpdir}`);
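// For each supported engine: dump the database inside its running container, bring the archive to the host (docker cp or shell redirection), and return it as a file download.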
if (type === 'mongodb') {
if (databaseService) {
const username = configuration.database.usernames[0];
const password = configuration.database.passwords[1];
const databaseName = configuration.database.defaultDatabaseName;
const filename = `${databaseName}_${now.getTime()}.gz`;
const fullfilename = `${tmpdir}/${filename}`;
await execShellAsync(
`docker exec -i ${containerID} /bin/bash -c "mkdir -p ${tmpdir};mongodump --uri='mongodb://${username}:${password}@${deployId}:27017' -d ${databaseName} --gzip --archive=${fullfilename}"`
);
await execShellAsync(`docker cp ${containerID}:${fullfilename} ${fullfilename}`);
await execShellAsync(`docker exec -i ${containerID} /bin/bash -c "rm -f ${fullfilename}"`);
return {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
'Content-Transfer-Encoding': 'binary',
'Content-Disposition': `attachment; filename=${filename}`
},
body: fs.readFileSync(`${fullfilename}`)
};
}
} else if (type === 'postgresql') {
if (databaseService) {
const username = configuration.database.usernames[0];
const password = configuration.database.passwords[0];
const databaseName = configuration.database.defaultDatabaseName;
const filename = `${databaseName}_${now.getTime()}.sql.gz`;
const fullfilename = `${tmpdir}/${filename}`;
await execShellAsync(
`docker exec -i ${containerID} /bin/bash -c "PGPASSWORD=${password} pg_dump --username ${username} -Z 9 ${databaseName}" > ${fullfilename}`
);
return {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
'Content-Transfer-Encoding': 'binary',
'Content-Disposition': `attachment; filename=${filename}`
},
body: fs.readFileSync(`${fullfilename}`)
};
}
} else if (type === 'couchdb') {
if (databaseService) {
const databaseName = configuration.database.defaultDatabaseName;
const filename = `${databaseName}_${now.getTime()}.tar.gz`;
const fullfilename = `${tmpdir}/${filename}`;
await execShellAsync(
`docker exec -i ${containerID} /bin/bash -c "cd /bitnami/couchdb/data/ && tar -czvf - ." > ${fullfilename}`
);
return {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
'Content-Transfer-Encoding': 'binary',
'Content-Disposition': `attachment; filename=${filename}`
},
body: fs.readFileSync(`${fullfilename}`)
};
}
} else if (type === 'mysql') {
if (databaseService) {
const username = configuration.database.usernames[0];
const password = configuration.database.passwords[0];
const databaseName = configuration.database.defaultDatabaseName;
const filename = `${databaseName}_${now.getTime()}.sql.gz`;
const fullfilename = `${tmpdir}/${filename}`;
await execShellAsync(
`docker exec -i ${containerID} /bin/bash -c "mysqldump -u ${username} -p${password} ${databaseName} | gzip -9 -" > ${fullfilename}`
);
return {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
'Content-Transfer-Encoding': 'binary',
'Content-Disposition': `attachment; filename=${filename}`
},
body: fs.readFileSync(`${fullfilename}`)
};
}
}
return {
status: 501,
body: {
error: `Backup method not implemented yet for ${type}.`
}
};
} catch (error) {
console.log(error);
await saveServerLog(error);
return {
status: 500,
body: {
error
}
};
} finally {
await execShellAsync(`rm -fr ${tmpdir}`);
}
}


@@ -0,0 +1,59 @@
import { execShellAsync } from '$lib/api/common';
import { docker } from '$lib/api/docker';
import type { Request } from '@sveltejs/kit';
export async function del(request: Request) {
const { deployId } = request.params;
await execShellAsync(`docker stack rm ${deployId}`);
return {
status: 200,
body: {}
};
}
export async function get(request: Request) {
const { deployId } = request.params;
try {
const database = (await docker.engine.listServices()).find(
(r) =>
r.Spec.Labels.managedBy === 'coolify' &&
r.Spec.Labels.type === 'database' &&
JSON.parse(r.Spec.Labels.configuration).general.deployId === deployId
);
if (database) {
const jsonEnvs = {};
if (database.Spec.TaskTemplate.ContainerSpec.Env) {
for (const d of database.Spec.TaskTemplate.ContainerSpec.Env) {
const s = d.split('=');
jsonEnvs[s[0]] = s[1];
}
}
const payload = {
config: JSON.parse(database.Spec.Labels.configuration),
envs: jsonEnvs || null
};
return {
status: 200,
body: {
...payload
}
};
} else {
return {
status: 500,
body: {
error: 'No database found.'
}
};
}
} catch (error) {
return {
status: 500,
body: {
error: 'No database found.'
}
};
}
}


@@ -0,0 +1,161 @@
import { saveServerLog } from '$lib/api/applications/logging';
import { docker } from '$lib/api/docker';
import type { Request } from '@sveltejs/kit';
import yaml from 'js-yaml';
import { promises as fs } from 'fs';
import cuid from 'cuid';
import generator from 'generate-password';
import { uniqueNamesGenerator, adjectives, colors, animals } from 'unique-names-generator';
import { execShellAsync } from '$lib/api/common';
function getUniq() {
return uniqueNamesGenerator({ dictionaries: [adjectives, animals, colors], length: 2 });
}
export async function post(request: Request) {
try {
const { type } = request.body;
let { defaultDatabaseName } = request.body;
const passwords = generator.generateMultiple(2, {
length: 24,
numbers: true,
strict: true
});
const usernames = generator.generateMultiple(2, {
length: 10,
numbers: true,
strict: true
});
// TODO: Query for existing db with the same name
const nickname = getUniq();
if (!defaultDatabaseName) defaultDatabaseName = nickname;
const deployId = cuid();
const configuration = {
general: {
workdir: `/tmp/${deployId}`,
deployId,
nickname,
type
},
database: {
usernames,
passwords,
defaultDatabaseName
},
deploy: {
name: nickname
}
};
await execShellAsync(`mkdir -p ${configuration.general.workdir}`);
let generateEnvs = {};
let image = null;
let volume = null;
let ulimits = {};
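// Per-engine settings: which image to run, where its data volume mounts, which generated credentials to inject, and (for ClickHouse) raised file-descriptor limits.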
if (type === 'mongodb') {
generateEnvs = {
MONGODB_ROOT_PASSWORD: passwords[0],
MONGODB_USERNAME: usernames[0],
MONGODB_PASSWORD: passwords[1],
MONGODB_DATABASE: defaultDatabaseName
};
image = 'bitnami/mongodb:4.4';
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mongodb`;
} else if (type === 'postgresql') {
generateEnvs = {
POSTGRESQL_PASSWORD: passwords[0],
POSTGRESQL_USERNAME: usernames[0],
POSTGRESQL_DATABASE: defaultDatabaseName
};
image = 'bitnami/postgresql:13.2.0';
volume = `${configuration.general.deployId}-${type}-data:/bitnami/postgresql`;
} else if (type === 'couchdb') {
generateEnvs = {
COUCHDB_PASSWORD: passwords[0],
COUCHDB_USER: usernames[0]
};
image = 'bitnami/couchdb:3';
volume = `${configuration.general.deployId}-${type}-data:/bitnami/couchdb`;
} else if (type === 'mysql') {
generateEnvs = {
MYSQL_ROOT_PASSWORD: passwords[0],
MYSQL_ROOT_USER: usernames[0],
MYSQL_USER: usernames[1],
MYSQL_PASSWORD: passwords[1],
MYSQL_DATABASE: defaultDatabaseName
};
image = 'bitnami/mysql:8.0';
volume = `${configuration.general.deployId}-${type}-data:/bitnami/mysql/data`;
} else if (type === 'clickhouse') {
image = 'yandex/clickhouse-server';
volume = `${configuration.general.deployId}-${type}-data:/var/lib/clickhouse`;
ulimits = {
nofile: {
soft: 262144,
hard: 262144
}
};
}
const stack = {
version: '3.8',
services: {
[configuration.general.deployId]: {
image,
networks: [`${docker.network}`],
environment: generateEnvs,
volumes: [volume],
ulimits,
deploy: {
replicas: 1,
update_config: {
parallelism: 0,
delay: '10s',
order: 'start-first'
},
rollback_config: {
parallelism: 0,
delay: '10s',
order: 'start-first'
},
labels: [
'managedBy=coolify',
'type=database',
'configuration=' + JSON.stringify(configuration)
]
}
}
},
networks: {
[`${docker.network}`]: {
external: true
}
},
volumes: {
[`${configuration.general.deployId}-${type}-data`]: {
external: true
}
}
};
await fs.writeFile(`${configuration.general.workdir}/stack.yml`, yaml.dump(stack));
await execShellAsync(
`cat ${configuration.general.workdir}/stack.yml | docker stack deploy -c - ${configuration.general.deployId}`
);
return {
status: 201,
body: {
message: 'Deployed.'
}
};
} catch (error) {
console.log(error);
await saveServerLog(error);
return {
status: 500,
body: {
error
}
};
}
}


@@ -0,0 +1,110 @@
import { githubAPI } from '$api';
import type { Request } from '@sveltejs/kit';
import mongoose from 'mongoose';
import User from '$models/User';
import Settings from '$models/Settings';
import cuid from 'cuid';
import jsonwebtoken from 'jsonwebtoken';
export async function get(request: Request) {
const code = request.query.get('code');
const { GITHUB_APP_CLIENT_SECRET, JWT_SIGN_KEY, VITE_GITHUB_APP_CLIENTID } = process.env;
try {
let uid = cuid();
const { access_token } = await (
await fetch(
`https://github.com/login/oauth/access_token?client_id=${VITE_GITHUB_APP_CLIENTID}&client_secret=${GITHUB_APP_CLIENT_SECRET}&code=${code}`,
{ headers: { accept: 'application/json' } }
)
).json();
const { avatar_url, id } = await (await githubAPI(request, '/user', access_token)).body;
const email = (await githubAPI(request, '/user/emails', access_token)).body.filter(
(e) => e.primary
)[0].email;
const settings = await Settings.findOne({ applicationName: 'coolify' });
const registeredUsers = await User.find().countDocuments();
const foundUser = await User.findOne({ email });
if (foundUser) {
await User.findOneAndUpdate({ email }, { avatar: avatar_url }, { upsert: true, new: true });
uid = foundUser.uid;
} else {
if (registeredUsers === 0) {
const newUser = new User({
_id: new mongoose.Types.ObjectId(),
email,
avatar: avatar_url,
uid
});
const defaultSettings = new Settings({
_id: new mongoose.Types.ObjectId()
});
try {
await newUser.save();
await defaultSettings.save();
} catch (e) {
console.log(e);
return {
status: 500,
body: e
};
}
} else {
if (!settings && registeredUsers > 0) {
return {
status: 500,
body: {
error: 'Registration disabled, enable it in settings.'
}
};
} else {
if (!settings.allowRegistration) {
return {
status: 500,
body: {
error: 'You are not allowed here!'
}
};
} else {
const newUser = new User({
_id: new mongoose.Types.ObjectId(),
email,
avatar: avatar_url,
uid
});
try {
await newUser.save();
} catch (e) {
console.log(e);
return {
status: 500,
body: {
error: e
}
};
}
}
}
}
}
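// Sign a Coolify session token (valid for ~6 months) and keep it, together with the GitHub access token, in the server-side session.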
const coolToken = jsonwebtoken.sign({}, JWT_SIGN_KEY, {
expiresIn: 15778800,
algorithm: 'HS256',
audience: 'coolLabs',
issuer: 'coolLabs',
jwtid: uid,
subject: `User:${uid}`,
notBefore: -1000
});
request.locals.session.data = { coolToken, ghToken: access_token };
return {
status: 302,
headers: {
location: `/success`
}
};
} catch (error) {
console.log('error happened');
console.log(error);
return { status: 500, body: { ...error } };
}
}


@@ -0,0 +1,10 @@
import type { Request } from '@sveltejs/kit';
export async function del(request: Request) {
request.locals.session.destroy = true;
return {
body: {
ok: true
}
};
}


@@ -0,0 +1,52 @@
import { execShellAsync } from '$lib/api/common';
import { docker } from '$lib/api/docker';
import type { Request } from '@sveltejs/kit';
export async function get(request: Request) {
const { serviceName } = request.params;
try {
const service = (await docker.engine.listServices()).find(
(r) =>
r.Spec.Labels.managedBy === 'coolify' &&
r.Spec.Labels.type === 'service' &&
r.Spec.Labels.serviceName === serviceName &&
r.Spec.Name === `${serviceName}_${serviceName}`
);
if (service) {
const payload = {
config: JSON.parse(service.Spec.Labels.configuration)
};
return {
status: 200,
body: {
success: true,
...payload
}
};
} else {
return {
status: 200,
body: {
success: false,
showToast: false,
message: 'Not found'
}
};
}
} catch (error) {
console.log(error);
return {
status: 500,
body: {
success: false,
error
}
};
}
}
export async function del(request: Request) {
const { serviceName } = request.params;
await execShellAsync(`docker stack rm ${serviceName}`);
return { status: 200, body: {} };
}


@@ -0,0 +1,24 @@
import { execShellAsync } from '$lib/api/common';
import type { Request } from '@sveltejs/kit';
export async function patch(request: Request) {
const { POSTGRESQL_USERNAME, POSTGRESQL_PASSWORD, POSTGRESQL_DATABASE } = JSON.parse(
JSON.parse(
await execShellAsync(
"docker service inspect plausible_plausible --format='{{json .Spec.Labels.configuration}}'"
)
)
).generateEnvsPostgres;
const containers = (await execShellAsync("docker ps -a --format='{{json .Names}}'"))
.replace(/"/g, '')
.trim()
.split('\n');
const postgresDB = containers.find((container) => container.startsWith('plausible_plausible_db'));
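// Mark every Plausible user as email-verified directly in its Postgres database.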
await execShellAsync(
`docker exec ${postgresDB} psql -H postgresql://${POSTGRESQL_USERNAME}:${POSTGRESQL_PASSWORD}@localhost:5432/${POSTGRESQL_DATABASE} -c "UPDATE users SET email_verified = true;"`
);
return {
status: 200,
body: { message: 'OK' }
};
}


@@ -0,0 +1,187 @@
import type { Request } from '@sveltejs/kit';
import generator from 'generate-password';
import { promises as fs } from 'fs';
import yaml from 'js-yaml';
import { docker } from '$lib/api/docker';
import { baseServiceConfiguration } from '$lib/api/applications/common';
import { cleanupTmp, execShellAsync } from '$lib/api/common';
export async function post(request: Request) {
const { email, userName, userPassword } = request.body;
let { baseURL } = request.body;
const traefikURL = baseURL;
baseURL = `https://${baseURL}`;
const deployId = 'plausible';
const workdir = '/tmp/plausible';
const secretKey = generator.generate({ length: 64, numbers: true, strict: true });
const generateEnvsPostgres = {
POSTGRESQL_PASSWORD: generator.generate({ length: 24, numbers: true, strict: true }),
POSTGRESQL_USERNAME: generator.generate({ length: 10, numbers: true, strict: true }),
POSTGRESQL_DATABASE: 'plausible'
};
const secrets = [
{ name: 'ADMIN_USER_EMAIL', value: email },
{ name: 'ADMIN_USER_NAME', value: userName },
{ name: 'ADMIN_USER_PWD', value: userPassword },
{ name: 'BASE_URL', value: baseURL },
{ name: 'SECRET_KEY_BASE', value: secretKey },
{ name: 'DISABLE_AUTH', value: 'false' },
{ name: 'DISABLE_REGISTRATION', value: 'true' },
{
name: 'DATABASE_URL',
value: `postgresql://${generateEnvsPostgres.POSTGRESQL_USERNAME}:${generateEnvsPostgres.POSTGRESQL_PASSWORD}@plausible_db:5432/${generateEnvsPostgres.POSTGRESQL_DATABASE}`
},
{ name: 'CLICKHOUSE_DATABASE_URL', value: 'http://plausible_events_db:8123/plausible' }
];
const generateEnvsClickhouse = {};
for (const secret of secrets) generateEnvsClickhouse[secret.name] = secret.value;
const clickhouseConfigXml = `
<yandex>
<logger>
<level>warning</level>
<console>true</console>
</logger>
<!-- Stop all the unnecessary logging -->
<query_thread_log remove="remove"/>
<query_log remove="remove"/>
<text_log remove="remove"/>
<trace_log remove="remove"/>
<metric_log remove="remove"/>
<asynchronous_metric_log remove="remove"/>
</yandex>`;
const clickhouseUserConfigXml = `
<yandex>
<profiles>
<default>
<log_queries>0</log_queries>
<log_query_threads>0</log_query_threads>
</default>
</profiles>
</yandex>`;
const clickhouseConfigs = [
{
source: 'plausible-clickhouse-user-config.xml',
target: '/etc/clickhouse-server/users.d/logging.xml'
},
{
source: 'plausible-clickhouse-config.xml',
target: '/etc/clickhouse-server/config.d/logging.xml'
},
{ source: 'plausible-init.query', target: '/docker-entrypoint-initdb.d/init.query' },
{ source: 'plausible-init-db.sh', target: '/docker-entrypoint-initdb.d/init-db.sh' }
];
const initQuery = 'CREATE DATABASE IF NOT EXISTS plausible;';
const initScript = 'clickhouse client --queries-file /docker-entrypoint-initdb.d/init.query';
await execShellAsync(`mkdir -p ${workdir}`);
await fs.writeFile(`${workdir}/clickhouse-config.xml`, clickhouseConfigXml);
await fs.writeFile(`${workdir}/clickhouse-user-config.xml`, clickhouseUserConfigXml);
await fs.writeFile(`${workdir}/init.query`, initQuery);
await fs.writeFile(`${workdir}/init-db.sh`, initScript);
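// The stack wires three services together: the Plausible app itself, a PostgreSQL instance for account/site data, and a ClickHouse instance for event data; the XML configs written above silence ClickHouse's verbose logging.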
const stack = {
version: '3.8',
services: {
[deployId]: {
image: 'plausible/analytics:latest',
command:
'sh -c "sleep 10 && /entrypoint.sh db createdb && /entrypoint.sh db migrate && /entrypoint.sh db init-admin && /entrypoint.sh run"',
networks: [`${docker.network}`],
volumes: [`${deployId}-postgres-data:/var/lib/postgresql/data`],
environment: generateEnvsClickhouse,
deploy: {
...baseServiceConfiguration,
labels: [
'managedBy=coolify',
'type=service',
'serviceName=plausible',
'configuration=' +
JSON.stringify({
email,
userName,
userPassword,
baseURL,
secretKey,
generateEnvsPostgres,
generateEnvsClickhouse
}),
'traefik.enable=true',
'traefik.http.services.' + deployId + '.loadbalancer.server.port=8000',
'traefik.http.routers.' + deployId + '.entrypoints=websecure',
'traefik.http.routers.' +
deployId +
'.rule=Host(`' +
traefikURL +
'`) && PathPrefix(`/`)',
'traefik.http.routers.' + deployId + '.tls.certresolver=letsencrypt',
'traefik.http.routers.' + deployId + '.middlewares=global-compress'
]
}
},
plausible_db: {
image: 'bitnami/postgresql:13.2.0',
networks: [`${docker.network}`],
environment: generateEnvsPostgres,
deploy: {
...baseServiceConfiguration,
labels: ['managedBy=coolify', 'type=service', 'serviceName=plausible']
}
},
plausible_events_db: {
image: 'yandex/clickhouse-server:21.3.2.5',
networks: [`${docker.network}`],
volumes: [`${deployId}-clickhouse-data:/var/lib/clickhouse`],
ulimits: {
nofile: {
soft: 262144,
hard: 262144
}
},
configs: [...clickhouseConfigs],
deploy: {
...baseServiceConfiguration,
labels: ['managedBy=coolify', 'type=service', 'serviceName=plausible']
}
}
},
networks: {
[`${docker.network}`]: {
external: true
}
},
volumes: {
[`${deployId}-clickhouse-data`]: {
external: true
},
[`${deployId}-postgres-data`]: {
external: true
}
},
configs: {
'plausible-clickhouse-user-config.xml': {
file: `${workdir}/clickhouse-user-config.xml`
},
'plausible-clickhouse-config.xml': {
file: `${workdir}/clickhouse-config.xml`
},
'plausible-init.query': {
file: `${workdir}/init.query`
},
'plausible-init-db.sh': {
file: `${workdir}/init-db.sh`
}
}
};
await fs.writeFile(`${workdir}/stack.yml`, yaml.dump(stack));
await execShellAsync('docker stack rm plausible');
await execShellAsync(`cat ${workdir}/stack.yml | docker stack deploy --prune -c - ${deployId}`);
cleanupTmp(workdir);
return {
status: 200,
body: { message: 'OK' }
};
}


@@ -0,0 +1,52 @@
import { saveServerLog } from '$lib/api/applications/logging';
import Settings from '$models/Settings';
import type { Request } from '@sveltejs/kit';
const applicationName = 'coolify';
export async function get(request: Request) {
try {
const settings = await Settings.findOne({ applicationName }).select('-_id -__v');
const payload = {
applicationName,
allowRegistration: false,
// On a fresh install there is no Settings document yet; fall back to the defaults above.
...(settings ? settings._doc : {})
};
return {
status: 200,
body: {
...payload
}
};
} catch (error) {
await saveServerLog(error);
return {
status: 500,
body: {
error
}
};
}
}
export async function post(request: Request) {
try {
const settings = await Settings.findOneAndUpdate(
{ applicationName },
{ applicationName, ...request.body },
{ upsert: true, new: true }
).select('-_id -__v');
return {
status: 201,
body: {
...settings._doc
}
};
} catch (error) {
await saveServerLog(error);
return {
status: 500,
body: {
error
}
};
}
}


@@ -0,0 +1,20 @@
import { saveServerLog } from '$lib/api/applications/logging';
import { execShellAsync } from '$lib/api/common';
import type { Request } from '@sveltejs/kit';
export async function get(request: Request) {
const upgradeP1 = await execShellAsync(
'bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p1.sh)"'
);
await saveServerLog({ message: upgradeP1, type: 'UPGRADE-P-1' });
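// Phase 2 is intentionally not awaited: it runs in a throwaway container with the Docker socket mounted, presumably so it can swap out the running Coolify instance.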
execShellAsync(
'docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -u root coolify bash -c "$(curl -fsSL https://get.coollabs.io/coolify/upgrade-p2.sh)"'
);
// saveServerLog({ message: upgradeP2, type: 'UPGRADE-P-2' })
return {
status: 200,
body: {
message: "I'm trying, okay?"
}
};
}


@@ -0,0 +1,24 @@
// import { deleteCookies } from '$lib/api/common';
// import { verifyUserId } from '$lib/api/common';
// import type { Request } from '@sveltejs/kit';
// import * as cookie from 'cookie';
// export async function post(request: Request) {
// const { coolToken } = cookie.parse(request.headers.cookie || '');
// try {
// await verifyUserId(coolToken);
// return {
// status: 200,
// body: { success: true }
// };
// } catch (error) {
// return {
// status: 301,
// headers: {
// location: '/',
// 'set-cookie': [...deleteCookies]
// },
// body: { error: 'Unauthorized' }
// };
// }
// }


@@ -0,0 +1,113 @@
import type { Request } from '@sveltejs/kit';
import crypto from 'crypto';
import Deployment from '$models/Logs/Deployment';
import { docker } from '$lib/api/docker';
import { precheckDeployment, setDefaultConfiguration } from '$lib/api/applications/configuration';
import cloneRepository from '$lib/api/applications/cloneRepository';
import { cleanupTmp } from '$lib/api/common';
import queueAndBuild from '$lib/api/applications/queueAndBuild';
export async function post(request: Request) {
let configuration;
const { GITHUP_APP_WEBHOOK_SECRET } = process.env;
const hmac = crypto.createHmac('sha256', GITHUP_APP_WEBHOOK_SECRET);
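// Verify the webhook: recompute the HMAC-SHA256 of the payload and compare it to GitHub's x-hub-signature-256 header in constant time.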
const digest = Buffer.from(
'sha256=' + hmac.update(JSON.stringify(request.body)).digest('hex'),
'utf8'
);
const checksum = Buffer.from(request.headers['x-hub-signature-256'], 'utf8');
if (checksum.length !== digest.length || !crypto.timingSafeEqual(digest, checksum)) {
return {
status: 500,
body: {
error: 'Invalid request'
}
};
}
if (request.headers['x-github-event'] !== 'push') {
return {
status: 500,
body: {
error: 'Not a push event.'
}
};
}
try {
const services = (await docker.engine.listServices()).filter(
(r) => r.Spec.Labels.managedBy === 'coolify' && r.Spec.Labels.type === 'application'
);
configuration = services.find((r) => {
if (request.body.ref.startsWith('refs')) {
const branch = request.body.ref.split('/')[2];
if (
JSON.parse(r.Spec.Labels.configuration).repository.id === request.body.repository.id &&
JSON.parse(r.Spec.Labels.configuration).repository.branch === branch
) {
return r;
}
}
return null;
});
// Bail out before dereferencing: no deployed application matches this repository and branch.
if (!configuration) {
return {
status: 500,
body: {
error: 'Whaaat?'
}
};
}
configuration = setDefaultConfiguration(JSON.parse(configuration.Spec.Labels.configuration));
await cloneRepository(configuration);
const { foundService, imageChanged, configChanged, forceUpdate } = await precheckDeployment({
services,
configuration
});
if (foundService && !forceUpdate && !imageChanged && !configChanged) {
cleanupTmp(configuration.general.workdir);
return {
status: 200,
body: {
success: false,
message: 'Nothing changed, no need to redeploy.'
}
};
}
const alreadyQueued = await Deployment.find({
repoId: configuration.repository.id,
branch: configuration.repository.branch,
organization: configuration.repository.organization,
name: configuration.repository.name,
domain: configuration.publish.domain,
progress: { $in: ['queued', 'inprogress'] }
});
if (alreadyQueued.length > 0) {
return {
status: 200,
body: {
success: false,
message: 'Already in the queue.'
}
};
}
queueAndBuild(configuration, imageChanged);
return {
status: 201,
body: {
message: 'Deployment queued.',
nickname: configuration.general.nickname,
name: configuration.build.container.name,
deployId: configuration.general.deployId
}
};
} catch (error) {
return {
status: 500,
body: {
error
}
};
}
}